Compare commits
1216 Commits
v0.15.1-sh ... features/s
| Author | SHA1 | Date |
|---|---|---|

[1216-commit listing omitted: only abbreviated SHA1 values (afe1fd89b9 through 59fb789290) survived extraction; the author, date, and commit-message columns are empty in the source.]
@@ -4,7 +4,8 @@ coverage:
   range: 60...90
   status:
     project:
-      default: yes
+      default:
+        threshold: 0.2%
 
 ignore:
   - lib/spack/spack/test/.*
23 .github/workflows/install_spack.sh vendored
@@ -1,5 +1,20 @@
 #!/usr/bin/env sh
-git clone https://github.com/spack/spack.git
-echo -e "config:\n build_jobs: 2" > spack/etc/spack/config.yaml
-. spack/share/spack/setup-env.sh
-spack compilers
+. share/spack/setup-env.sh
+echo -e "config:\n build_jobs: 2" > etc/spack/config.yaml
+spack config add "packages:all:target:[x86_64]"
+# TODO: remove this explicit setting once apple-clang detection is fixed
+cat <<EOF > etc/spack/compilers.yaml
+compilers:
+- compiler:
+    spec: apple-clang@11.0.3
+    paths:
+      cc: /usr/bin/clang
+      cxx: /usr/bin/clang++
+      f77: /usr/local/bin/gfortran-9
+      fc: /usr/local/bin/gfortran-9
+    modules: []
+    operating_system: catalina
+    target: x86_64
+EOF
+spack compiler info apple-clang
+spack debug report
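The rewritten helper above assumes it runs from a checkout of the Spack repository itself (hence the relative `etc/spack/...` and `share/spack/setup-env.sh` paths) rather than cloning a fresh copy. A minimal sketch of sanity-checking the generated configuration from the same shell; `spack config get` and `spack compiler info` are real Spack commands, but this check is an illustration, not part of the workflow:

    # Source Spack from the repository root, as the helper does.
    . share/spack/setup-env.sh
    # Should report the apple-clang compiler written by the heredoc above.
    spack compiler info apple-clang
    # Should show "build_jobs: 2" coming from the generated config.yaml.
    spack config get config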
24 .github/workflows/linux_build_tests.yaml vendored
@@ -3,13 +3,12 @@ name: linux builds
 on:
   push:
     branches:
-      - master
       - develop
+      - releases/**
   pull_request:
     branches:
-      - master
       - develop
       - releases/**
     paths-ignore:
       # Don't run if we only modified packages in the built-in repository
       - 'var/spack/repos/builtin/**'
@@ -19,36 +18,41 @@ on:
       - '!var/spack/repos/builtin/packages/py-setuptools/**'
       - '!var/spack/repos/builtin/packages/openjpeg/**'
       - '!var/spack/repos/builtin/packages/r-rcpp/**'
+      - '!var/spack/repos/builtin/packages/ruby-rake/**'
       # Don't run if we only modified documentation
       - 'lib/spack/docs/**'
 
 jobs:
   build:
-
     runs-on: ubuntu-latest
     strategy:
-      max-parallel: 4
       matrix:
-        package: [lz4, mpich, tut, py-setuptools, openjpeg, r-rcpp]
+        package:
+          - lz4            # MakefilePackage
+          - mpich          # AutotoolsPackage
+          - tut            # WafPackage
+          - py-setuptools  # PythonPackage
+          - openjpeg       # CMakePackage
+          - r-rcpp         # RPackage
+          - ruby-rake      # RubyPackage
     steps:
     - uses: actions/checkout@v2
-    - name: Cache ccache's store
-      uses: actions/cache@v1
+    - uses: actions/cache@v2
       with:
        path: ~/.ccache
        key: ccache-build-${{ matrix.package }}
        restore-keys: |
          ccache-build-${{ matrix.package }}
-    - name: Setup Python
-      uses: actions/setup-python@v1
+    - uses: actions/setup-python@v2
      with:
        python-version: 3.8
    - name: Install System Packages
      run: |
        sudo apt-get update
-        sudo apt-get -yqq install ccache gfortran perl perl-base r-base r-base-core r-base-dev findutils openssl libssl-dev libpciaccess-dev
+        sudo apt-get -yqq install ccache gfortran perl perl-base r-base r-base-core r-base-dev ruby findutils openssl libssl-dev libpciaccess-dev
        R --version
        perl --version
+        ruby --version
    - name: Copy Configuration
      run: |
        ccache -M 300M && ccache -z
70 .github/workflows/linux_unit_tests.yaml vendored
@@ -3,13 +3,12 @@ name: linux tests
 on:
   push:
     branches:
-      - master
       - develop
+      - releases/**
   pull_request:
     branches:
-      - master
       - develop
       - releases/**
 jobs:
   unittests:
     runs-on: ubuntu-latest
@@ -19,8 +18,9 @@ jobs:
 
     steps:
     - uses: actions/checkout@v2
-    - name: Setup Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v2
+      with:
+        fetch-depth: 0
+    - uses: actions/setup-python@v2
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install System packages
@@ -36,9 +36,7 @@ jobs:
       run: |
         # Need this for the git tests to succeed.
         git --version
-        git config --global user.email "spack@example.com"
-        git config --global user.name "Test User"
-        git fetch -u origin develop:develop
+        . .github/workflows/setup_git.sh
     - name: Install kcov for bash script coverage
       env:
         KCOV_VERSION: 34
@@ -56,7 +54,61 @@ jobs:
         share/spack/qa/run-unit-tests
         coverage combine
         coverage xml
-    - name: Upload to codecov.io
-      uses: codecov/codecov-action@v1
+    - uses: codecov/codecov-action@v1
       with:
         flags: unittests,linux
+  shell:
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v2
+      with:
+        fetch-depth: 0
+    - uses: actions/setup-python@v2
+      with:
+        python-version: 3.8
+    - name: Install System packages
+      run: |
+        sudo apt-get -y update
+        sudo apt-get install -y coreutils gfortran gnupg2 mercurial ninja-build patchelf zsh fish
+        # Needed for kcov
+        sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev zlib1g-dev libdw-dev libiberty-dev
+    - name: Install Python packages
+      run: |
+        pip install --upgrade pip six setuptools codecov coverage
+    - name: Setup git configuration
+      run: |
+        # Need this for the git tests to succeed.
+        git --version
+        . .github/workflows/setup_git.sh
+    - name: Install kcov for bash script coverage
+      env:
+        KCOV_VERSION: 38
+      run: |
+        KCOV_ROOT=$(mktemp -d)
+        wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz
+        tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz
+        mkdir -p ${KCOV_ROOT}/build
+        cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd -
+        make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install
+    - name: Run shell tests
+      env:
+        COVERAGE: true
+      run: |
+        share/spack/qa/run-shell-tests
+    - uses: codecov/codecov-action@v1
+      with:
+        flags: shelltests,linux
+  centos6:
+    # Test for Python2.6 run on Centos 6
+    runs-on: ubuntu-latest
+    container: spack/github-actions:centos6
+    steps:
+    - name: Run unit tests
+      env:
+        HOME: /home/spack-test
+      run: |
+        whoami && echo $HOME && cd $HOME
+        git clone https://github.com/spack/spack.git && cd spack
+        git fetch origin ${{ github.ref }}:test-branch
+        git checkout test-branch
+        share/spack/qa/run-unit-tests
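The new `shell` job boils down to running `share/spack/qa/run-shell-tests` with coverage enabled; that script exists in the Spack tree, so the same check can be reproduced outside CI. A minimal sketch, assuming a Spack checkout as the working directory and kcov already on the PATH:

    # Mirror the CI job's environment and invoke the same QA script.
    export COVERAGE=true
    share/spack/qa/run-shell-tests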
35 .github/workflows/macos_python.yml vendored
@@ -8,6 +8,13 @@ on:
   schedule:
     # nightly at 1 AM
     - cron: '0 1 * * *'
+  pull_request:
+    branches:
+      - develop
+    paths:
+      # Run if we modify this yaml file
+      - '.github/workflows/macos_python.yml'
+      # TODO: run if we touch any of the recipes involved in this
 
 # GitHub Action Limits
 # https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions
@@ -18,10 +25,14 @@ jobs:
     runs-on: macos-latest
     steps:
     - uses: actions/checkout@v2
+    - uses: actions/setup-python@v2
+      with:
+        python-version: 3.8
     - name: spack install
       run: |
         . .github/workflows/install_spack.sh
-        spack install -v gcc
+        # 9.2.0 is the latest version on which we apply homebrew patch
+        spack install -v --fail-fast gcc@9.2.0 %apple-clang
 
   install_jupyter_clang:
     name: jupyter
@@ -29,30 +40,40 @@ jobs:
     timeout-minutes: 700
     steps:
     - uses: actions/checkout@v2
+    - uses: actions/setup-python@v2
+      with:
+        python-version: 3.8
     - name: spack install
       run: |
         . .github/workflows/install_spack.sh
-        spack install -v py-jupyter %clang
+        spack config add packages:opengl:paths:opengl@4.1:/usr/X11R6
+        spack install -v --fail-fast py-jupyter %apple-clang
 
   install_scipy_clang:
     name: scipy, mpl, pd
     runs-on: macos-latest
     steps:
     - uses: actions/checkout@v2
+    - uses: actions/setup-python@v2
+      with:
+        python-version: 3.8
     - name: spack install
       run: |
         . .github/workflows/install_spack.sh
-        spack install -v py-scipy %clang
-        spack install -v py-matplotlib %clang
-        spack install -v py-pandas %clang
+        spack install -v --fail-fast py-scipy %apple-clang
+        spack install -v --fail-fast py-matplotlib %apple-clang
+        spack install -v --fail-fast py-pandas %apple-clang
 
   install_mpi4py_clang:
     name: mpi4py, petsc4py
     runs-on: macos-latest
     steps:
     - uses: actions/checkout@v2
+    - uses: actions/setup-python@v2
+      with:
+        python-version: 3.8
     - name: spack install
       run: |
         . .github/workflows/install_spack.sh
-        spack install -v py-mpi4py %clang
-        spack install -v py-petsc4py %clang
+        spack install -v --fail-fast py-mpi4py %apple-clang
+        spack install -v --fail-fast py-petsc4py %apple-clang
24 .github/workflows/macos_unit_tests.yaml vendored
@@ -3,27 +3,22 @@ name: macos tests
 on:
   push:
     branches:
-      - master
       - develop
       - releases/**
   pull_request:
     branches:
-      - master
       - develop
       - releases/**
 jobs:
   build:
     runs-on: macos-latest
-    strategy:
-      matrix:
-        python-version: [3.7]
-
     steps:
     - uses: actions/checkout@v2
-    - name: Setup Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v2
       with:
-        python-version: ${{ matrix.python-version }}
+        fetch-depth: 0
+    - uses: actions/setup-python@v2
+      with:
+        python-version: 3.8
     - name: Install Python packages
       run: |
         pip install --upgrade pip six setuptools
@@ -31,19 +26,16 @@ jobs:
         pip install --upgrade flake8 pep8-naming
     - name: Setup Homebrew packages
       run: |
-        brew update
-        brew upgrade
         brew install gcc gnupg2 dash kcov
     - name: Run unit tests
       run: |
         git --version
-        git fetch -u origin develop:develop
+        . .github/workflows/setup_git.sh
         . share/spack/setup-env.sh
-        coverage run $(which spack) test
+        coverage run $(which spack) unit-test
         coverage combine
         coverage xml
-    - name: Upload to codecov.io
-      uses: codecov/codecov-action@v1
+    - uses: codecov/codecov-action@v1
       with:
         file: ./coverage.xml
         flags: unittests,macos
31 .github/workflows/minimum_python_versions.yaml vendored
@@ -1,31 +0,0 @@
-name: python version check
-
-on:
-  push:
-    branches:
-      - master
-      - develop
-      - releases/**
-  pull_request:
-    branches:
-      - master
-      - develop
-jobs:
-  validate:
-
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v2
-    - name: Setup Python
-      uses: actions/setup-python@v1
-      with:
-        python-version: 3.7
-    - name: Install Python Packages
-      run: |
-        pip install --upgrade pip
-        pip install --upgrade vermin
-    - name: Minimum Version (Spack's Core)
-      run: vermin --backport argparse -t=2.6- -t=3.5- -v lib/spack/spack/ lib/spack/llnl/ bin/
-    - name: Minimum Version (Repositories)
-      run: vermin --backport argparse -t=2.6- -t=3.5- -v var/spack/repos
9 .github/workflows/setup_git.sh vendored Executable file
@@ -0,0 +1,9 @@
+#!/usr/bin/env sh
+git config --global user.email "spack@example.com"
+git config --global user.name "Test User"
+# With fetch-depth: 0 we have a remote develop
+# but not a local branch. Don't do this on develop
+if [ "$(git branch --show-current)" != "develop" ]
+then
+  git branch develop origin/develop
+fi
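This script exists because `actions/checkout@v2` with `fetch-depth: 0` fetches all refs but only creates a local branch for the ref under test, while Spack's tests compare against a local `develop`. A sketch of the same situation reproduced outside CI; the branch name `my-feature` is a hypothetical placeholder:

    git clone https://github.com/spack/spack.git && cd spack
    git checkout -b my-feature          # hypothetical topic branch
    . .github/workflows/setup_git.sh    # creates local develop from origin/develop
    git branch --list develop           # confirms the local branch now exists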
65 .github/workflows/style_and_docs.yaml vendored Normal file
@@ -0,0 +1,65 @@
+name: style and docs
+
+on:
+  push:
+    branches:
+      - develop
+      - releases/**
+  pull_request:
+    branches:
+      - develop
+      - releases/**
+jobs:
+  validate:
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v2
+    - uses: actions/setup-python@v2
+      with:
+        python-version: 3.8
+    - name: Install Python Packages
+      run: |
+        pip install --upgrade pip
+        pip install --upgrade vermin
+    - name: Minimum Version (Spack's Core)
+      run: vermin --backport argparse -t=2.6- -t=3.5- -v lib/spack/spack/ lib/spack/llnl/ bin/
+    - name: Minimum Version (Repositories)
+      run: vermin --backport argparse -t=2.6- -t=3.5- -v var/spack/repos
+  flake8:
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v2
+      with:
+        fetch-depth: 0
+    - uses: actions/setup-python@v2
+      with:
+        python-version: 3.8
+    - name: Install Python packages
+      run: |
+        pip install --upgrade pip six setuptools flake8
+    - name: Setup git configuration
+      run: |
+        # Need this for the git tests to succeed.
+        git --version
+        . .github/workflows/setup_git.sh
+    - name: Run flake8 tests
+      run: |
+        share/spack/qa/run-flake8-tests
+  documentation:
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v2
+    - uses: actions/setup-python@v2
+      with:
+        python-version: 3.8
+    - name: Install System packages
+      run: |
+        sudo apt-get -y update
+        sudo apt-get install -y coreutils ninja-build graphviz
+    - name: Install Python packages
+      run: |
+        pip install --upgrade pip six setuptools
+        pip install --upgrade -r lib/spack/docs/requirements.txt
+    - name: Build documentation
+      run: |
+        share/spack/qa/run-doc-tests
4 .mailmap
@@ -20,8 +20,8 @@ Geoffrey Oxberry <oxberry1@llnl.gov> Geoffrey Oxberry
 Glenn Johnson <glenn-johnson@uiowa.edu> Glenn Johnson <gjohnson@argon-ohpc.hpc.uiowa.edu>
 Glenn Johnson <glenn-johnson@uiowa.edu> Glenn Johnson <glennpj@gmail.com>
-Gregory Becker <becker33@llnl.gov> Gregory Becker <becker33.llnl.gov>
-Gregory Becker <becker33@llnl.gov> becker33 <becker33.llnl.gov>
 Gregory Becker <becker33@llnl.gov> becker33 <becker33@llnl.gov>
+Gregory Becker <becker33@llnl.gov> Gregory Becker <becker33.llnl.gov>
+Gregory Becker <becker33@llnl.gov> Gregory Becker <becker33@llnl.gov>
 Gregory L. Lee <lee218@llnl.gov> Greg Lee <lee218@llnl.gov>
 Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@cab687.llnl.gov>
 Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@cab690.llnl.gov>
152 .travis.yml
@@ -1,152 +0,0 @@
-#=============================================================================
-# Project settings
-#=============================================================================
-# Only build master and develop on push; do not build every branch.
-branches:
-  only:
-    - master
-    - develop
-    - /^releases\/.*$/
-
-#=============================================================================
-# Build matrix
-#=============================================================================
-
-dist: bionic
-
-jobs:
-  fast_finish: true
-  include:
-    - stage: 'style checks'
-      python: '3.8'
-      os: linux
-      language: python
-      env: TEST_SUITE=flake8
-    - stage: 'unit tests + documentation'
-      python: '2.6'
-      dist: trusty
-      os: linux
-      language: python
-      addons:
-        apt:
-          # Everything but patchelf, that is not available for trusty
-          packages:
-            - ccache
-            - gfortran
-            - graphviz
-            - gnupg2
-            - kcov
-            - mercurial
-            - ninja-build
-            - realpath
-            - zsh
-            - fish
-      env: [ TEST_SUITE=unit, COVERAGE=true ]
-    - python: '3.8'
-      os: linux
-      language: python
-      env: [ TEST_SUITE=shell, COVERAGE=true, KCOV_VERSION=38 ]
-    - python: '3.8'
-      os: linux
-      language: python
-      env: TEST_SUITE=doc
-
-stages:
-  - 'style checks'
-  - 'unit tests + documentation'
-
-
-#=============================================================================
-# Environment
-#=============================================================================
-
-# Docs need graphviz to build
-addons:
-  # for Linux builds, we use APT
-  apt:
-    packages:
-      - ccache
-      - coreutils
-      - gfortran
-      - graphviz
-      - gnupg2
-      - mercurial
-      - ninja-build
-      - patchelf
-      - zsh
-      - fish
-    update: true
-
-# ~/.ccache needs to be cached directly as Travis is not taking care of it
-# (possibly because we use 'language: python' and not 'language: c')
-cache:
-  pip: true
-  ccache: true
-  directories:
-    - ~/.ccache
-
-before_install:
-  - ccache -M 2G && ccache -z
-  # Install kcov manually, since it's not packaged for bionic beaver
-  - if [[ "$KCOV_VERSION" ]]; then
-      sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev zlib1g-dev libdw-dev libiberty-dev;
-      KCOV_ROOT=$(mktemp -d);
-      wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz;
-      tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz;
-      mkdir -p ${KCOV_ROOT}/build;
-      cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd - ;
-      make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install;
-    fi
-
-# Install various dependencies
-install:
-  - pip install --upgrade pip
-  - pip install --upgrade six
-  - pip install --upgrade setuptools
-  - pip install --upgrade codecov coverage==4.5.4
-  - pip install --upgrade flake8
-  - pip install --upgrade pep8-naming
-  - if [[ "$TEST_SUITE" == "doc" ]]; then
-      pip install --upgrade -r lib/spack/docs/requirements.txt;
-    fi
-
-before_script:
-  # Need this for the git tests to succeed.
-  - git config --global user.email "spack@example.com"
-  - git config --global user.name "Test User"
-
-  # Need this to be able to compute the list of changed files
-  - git fetch origin ${TRAVIS_BRANCH}:${TRAVIS_BRANCH}
-
-#=============================================================================
-# Building
-#=============================================================================
-script:
-  - share/spack/qa/run-$TEST_SUITE-tests
-
-after_success:
-  - ccache -s
-  - case "$TEST_SUITE" in
-    unit)
-      if [[ "$COVERAGE" == "true" ]]; then
-        codecov --env PYTHON_VERSION
-                --required
-                --flags "${TEST_SUITE}${TRAVIS_OS_NAME}";
-      fi
-      ;;
-    shell)
-      codecov --env PYTHON_VERSION
-              --required
-              --flags "${TEST_SUITE}${TRAVIS_OS_NAME}";
-    esac
-
-#=============================================================================
-# Notifications
-#=============================================================================
-notifications:
-  email:
-    recipients:
-      - tgamblin@llnl.gov
-      - massimiliano.culpo@gmail.com
-    on_success: change
-    on_failure: always
48 CHANGELOG.md
@@ -1,3 +1,51 @@
+# v0.15.4 (2020-08-12)
+
+This release contains one feature addition:
+
+* Users can set `SPACK_GNUPGHOME` to override Spack's GPG path (#17139)
+
+Several bugfixes for CUDA, binary packaging, and `spack -V`:
+
+* CUDA package's `.libs` method searches for `libcudart` instead of `libcuda` (#18000)
+* Don't set `CUDAHOSTCXX` in environments that contain CUDA (#17826)
+* `buildcache create`: `NoOverwriteException` is a warning, not an error (#17832)
+* Fix `spack buildcache list --allarch` (#17884)
+* `spack -V` works with `releases/latest` tag and shallow clones (#17884)
+
+And fixes for GitHub Actions and tests to ensure that CI passes on the
+release branch (#15687, #17279, #17328, #17377, #17732).
+
+# v0.15.3 (2020-07-28)
+
+This release contains the following bugfixes:
+
+* Fix handling of relative view paths (#17721)
+* Fixes for binary relocation (#17418, #17455)
+* Fix redundant printing of error messages in build environment (#17709)
+
+It also adds a support script for Spack tutorials:
+
+* Add a tutorial setup script to share/spack (#17705, #17722)
+
+# v0.15.2 (2020-07-23)
+
+This minor release includes two new features:
+
+* Spack install verbosity is decreased, and more debug levels are added (#17546)
+* The $spack/share/spack/keys directory contains public keys that may be optionally trusted for public binary mirrors (#17684)
+
+This release also includes several important fixes:
+
+* MPICC and related variables are now cleaned in the build environment (#17450)
+* LLVM flang only builds CUDA offload components when +cuda (#17466)
+* CI pipelines no longer upload user environments that can contain secrets to the internet (#17545)
+* CI pipelines add bootstrapped compilers to the compiler config (#17536)
+* `spack buildcache list` does not exit on first failure and lists later mirrors (#17565)
+* Apple's "gcc" executable that is an apple-clang compiler does not generate a gcc compiler config (#17589)
+* Mixed compiler toolchains are merged more naturally across different compiler suffixes (#17590)
+* Cray Shasta platforms detect the OS properly (#17467)
+* Additional minor fixes.
+
 # v0.15.1 (2020-07-10)
 
 This minor release includes several important fixes:
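The v0.15.4 entry above introduces the `SPACK_GNUPGHOME` override. A minimal sketch of using it, assuming a POSIX shell; the directory path here is an example, not a Spack default:

    # Point Spack's GPG operations at an alternate keyring directory.
    export SPACK_GNUPGHOME="$HOME/.gnupg/spack"
    spack gpg list    # now reads keys from the overridden path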
27 LICENSE-MIT
@@ -1,20 +1,21 @@
-Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
-Spack Project Developers. See the top-level COPYRIGHT file for details.
+MIT License
 
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
+Copyright (c) 2013-2020 LLNS, LLC and other Spack Project Developers.
 
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
 
 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
33 README.md
@@ -4,7 +4,6 @@
 [](https://github.com/spack/spack/actions)
 [](https://github.com/spack/spack/actions)
 [](https://github.com/spack/spack/actions?query=workflow%3A%22macOS+builds+nightly%22)
-[](https://travis-ci.com/spack/spack)
 [](https://codecov.io/gh/spack/spack)
 [](https://spack.readthedocs.io)
 [](https://spackpm.herokuapp.com)
@@ -74,15 +73,33 @@ When you send your request, make ``develop`` the destination branch on the
 
 Your PR must pass Spack's unit tests and documentation tests, and must be
 [PEP 8](https://www.python.org/dev/peps/pep-0008/) compliant. We enforce
-these guidelines with [Travis CI](https://travis-ci.org/spack/spack). To
-run these tests locally, and for helpful tips on git, see our
+these guidelines with our CI process. To run these tests locally, and for
+helpful tips on git, see our
 [Contribution Guide](http://spack.readthedocs.io/en/latest/contribution_guide.html).
 
-Spack uses a rough approximation of the
-[Git Flow](http://nvie.com/posts/a-successful-git-branching-model/)
-branching model. The ``develop`` branch contains the latest
-contributions, and ``master`` is always tagged and points to the latest
-stable release.
+Spack's `develop` branch has the latest contributions. Pull requests
+should target `develop`, and users who want the latest package versions,
+features, etc. can use `develop`.
+
+Releases
+--------
+
+For multi-user site deployments or other use cases that need very stable
+software installations, we recommend using Spack's
+[stable releases](https://github.com/spack/spack/releases).
+
+Each Spack release series also has a corresponding branch, e.g.
+`releases/v0.14` has `0.14.x` versions of Spack, and `releases/v0.13` has
+`0.13.x` versions. We backport important bug fixes to these branches but
+we do not advance the package versions or make other changes that would
+change the way Spack concretizes dependencies within a release branch.
+So, you can base your Spack deployment on a release branch and `git pull`
+to get fixes, without the package churn that comes with `develop`.
+
+The latest release is always available with the `releases/latest` tag.
+
+See the [docs on releases](https://spack.readthedocs.io/en/latest/developer_guide.html#releases)
+for more details.
 
 Code of Conduct
 ------------------------
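The release-branch workflow the new README text describes can be exercised directly with plain git; `releases/v0.14` is taken from the README's own example:

    # Deploy from a release branch instead of develop.
    git clone -b releases/v0.14 https://github.com/spack/spack.git
    cd spack
    git pull    # picks up backported bug fixes without package churn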
@@ -64,6 +64,10 @@ config:
     - ~/.spack/stage
   # - $spack/var/spack/stage
 
+  # Directory in which to run tests and store test results.
+  # Tests will be stored in directories named by date/time and package
+  # name/hash.
+  test_stage: ~/.spack/test
 
   # Cache directory for already downloaded source tarballs and archived
   # repositories. This can be purged with `spack clean --downloads`.
@@ -21,11 +21,14 @@ packages:
       - gcc
       - intel
     providers:
-      elf: [libelf]
-      unwind: [apple-libunwind]
+      elf:
+        - libelf
+      unwind:
+        - apple-libunwind
   apple-libunwind:
-    paths:
+    buildable: false
+    externals:
       # Apple bundles libunwind version 35.3 with macOS 10.9 and later,
       # although the version number used here isn't critical
-      apple-libunwind@35.3: /usr
-    buildable: False
+    - spec: apple-libunwind@35.3
+      prefix: /usr
@@ -38,7 +38,7 @@ packages:
     mpi: [openmpi, mpich]
     mysql-client: [mysql, mariadb-c-client]
     opencl: [pocl]
-    pil: [py-pillow]
+    pil: [py-pillow-simd]
     pkgconfig: [pkgconf, pkg-config]
     rpc: [libtirpc]
     scalapack: [netlib-scalapack]
@@ -695,11 +695,11 @@ Here is an example of a much longer spec than we've seen thus far:
 
 .. code-block:: none
 
-   mpileaks @1.2:1.4 %gcc@4.7.5 +debug -qt arch=bgq_os ^callpath @1.1 %gcc@4.7.2
+   mpileaks @1.2:1.4 %gcc@4.7.5 +debug -qt target=x86_64 ^callpath @1.1 %gcc@4.7.2
 
 If provided to ``spack install``, this will install the ``mpileaks``
 library at some version between ``1.2`` and ``1.4`` (inclusive),
-built using ``gcc`` at version 4.7.5 for the Blue Gene/Q architecture,
+built using ``gcc`` at version 4.7.5 for a generic ``x86_64`` architecture,
 with debug options enabled, and without Qt support. Additionally, it
 says to link it with the ``callpath`` library (which it depends on),
 and to build callpath with ``gcc`` 4.7.2. Most specs will not be as
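The updated spec above can be checked without installing anything, since `spack spec` prints the concretized result. A sketch using the document's own example; note the `~qt` form is used instead of `-qt` so the shell does not read the dash as a command-line flag:

    # Concretize (but do not install) the example spec from the docs.
    spack spec mpileaks@1.2:1.4 %gcc@4.7.5 +debug~qt target=x86_64 ^callpath@1.1 %gcc@4.7.2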
@@ -57,10 +57,13 @@ directory. Here's an example of an external configuration:
 
    packages:
      openmpi:
-       paths:
-         openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64: /opt/openmpi-1.4.3
-         openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug: /opt/openmpi-1.4.3-debug
-         openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64: /opt/openmpi-1.6.5-intel
+       externals:
+       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
+         prefix: /opt/openmpi-1.4.3
+       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
+         prefix: /opt/openmpi-1.4.3-debug
+       - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
+         prefix: /opt/openmpi-1.6.5-intel
 
 This example lists three installations of OpenMPI, one built with GCC,
 one built with GCC and debug information, and another built with Intel.
@@ -76,13 +79,15 @@ of the installation prefixes. The following example says that module
 .. code-block:: yaml
 
    cmake:
-     modules:
-       cmake@3.7.2: CMake/3.7.2
+     externals:
+     - spec: cmake@3.7.2
+       modules:
+       - CMake/3.7.2
 
-Each ``packages.yaml`` begins with a ``packages:`` token, followed
-by a list of package names. To specify externals, add a ``paths`` or ``modules``
-token under the package name, which lists externals in a
-``spec: /path`` or ``spec: module-name`` format. Each spec should be as
+Each ``packages.yaml`` begins with a ``packages:`` attribute, followed
+by a list of package names. To specify externals, add an ``externals:``
+attribute under the package name, which lists externals.
+Each external should specify a ``spec:`` string that should be as
 well-defined as reasonably possible. If a
 package lacks a spec component, such as missing a compiler or
 package version, then Spack will guess the missing component based
@@ -106,10 +111,13 @@ be:
 
    packages:
      openmpi:
-       paths:
-         openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64: /opt/openmpi-1.4.3
-         openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug: /opt/openmpi-1.4.3-debug
-         openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64: /opt/openmpi-1.6.5-intel
+       externals:
+       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
+         prefix: /opt/openmpi-1.4.3
+       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
+         prefix: /opt/openmpi-1.4.3-debug
+       - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
+         prefix: /opt/openmpi-1.6.5-intel
        buildable: False
 
 The addition of the ``buildable`` flag tells Spack that it should never build
@@ -137,10 +145,13 @@ but more conveniently:
   mpi:
     buildable: False
   openmpi:
     paths:
       openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64: /opt/openmpi-1.4.3
       openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug: /opt/openmpi-1.4.3-debug
       openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64: /opt/openmpi-1.6.5-intel
     externals:
     - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
       prefix: /opt/openmpi-1.4.3
     - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
       prefix: /opt/openmpi-1.4.3-debug
     - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
       prefix: /opt/openmpi-1.6.5-intel

Implementations can also be listed immediately under the virtual they provide:

@@ -172,8 +183,9 @@ After running this command your ``packages.yaml`` may include new entries:

   packages:
     cmake:
       paths:
         cmake@3.17.2: /usr
       externals:
       - spec: cmake@3.17.2
         prefix: /usr

Generally this is useful for detecting a small set of commonly-used packages;
for now, detection is limited to build-only dependencies.

@@ -29,6 +29,7 @@ on these ideas for each distinct build system that Spack supports:
   :maxdepth: 1
   :caption: Make-incompatible

   build_systems/mavenpackage
   build_systems/sconspackage
   build_systems/wafpackage

@@ -175,7 +175,7 @@ In the ``perl`` package, we can see:

   @run_after('build')
   @on_package_attributes(run_tests=True)
   def test(self):
   def build_test(self):
       make('test')

As you can guess, this runs ``make test`` *after* building the package,

@@ -418,9 +418,13 @@ Adapt the following example. Be sure to maintain the indentation:
   # other content ...

   intel-mkl:
     modules:
       intel-mkl@2018.2.199 arch=linux-centos6-x86_64: intel-mkl/18/18.0.2
       intel-mkl@2018.3.222 arch=linux-centos6-x86_64: intel-mkl/18/18.0.3
     externals:
     - spec: "intel-mkl@2018.2.199 arch=linux-centos6-x86_64"
       modules:
       - intel-mkl/18/18.0.2
     - spec: "intel-mkl@2018.3.222 arch=linux-centos6-x86_64"
       modules:
       - intel-mkl/18/18.0.3

The version numbers for the ``intel-mkl`` specs defined here correspond to file
and directory names that Intel uses for its products because they were adopted
@@ -451,12 +455,16 @@ mechanism.

   packages:
     intel-parallel-studio:
       modules:
         intel-parallel-studio@cluster.2018.2.199 +mkl+mpi+ipp+tbb+daal arch=linux-centos6-x86_64: intel/18/18.0.2
         intel-parallel-studio@cluster.2018.3.222 +mkl+mpi+ipp+tbb+daal arch=linux-centos6-x86_64: intel/18/18.0.3
       externals:
       - spec: "intel-parallel-studio@cluster.2018.2.199 +mkl+mpi+ipp+tbb+daal arch=linux-centos6-x86_64"
         modules:
         - intel/18/18.0.2
       - spec: "intel-parallel-studio@cluster.2018.3.222 +mkl+mpi+ipp+tbb+daal arch=linux-centos6-x86_64"
         modules:
         - intel/18/18.0.3
       buildable: False

One additional example illustrates the use of ``paths:`` instead of
One additional example illustrates the use of ``prefix:`` instead of
``modules:``, useful when external modulefiles are not available or not
suitable:

@@ -464,13 +472,15 @@ suitable:

   packages:
     intel-parallel-studio:
       paths:
         intel-parallel-studio@cluster.2018.2.199 +mkl+mpi+ipp+tbb+daal: /opt/intel
         intel-parallel-studio@cluster.2018.3.222 +mkl+mpi+ipp+tbb+daal: /opt/intel
       externals:
       - spec: "intel-parallel-studio@cluster.2018.2.199 +mkl+mpi+ipp+tbb+daal"
         prefix: /opt/intel
       - spec: "intel-parallel-studio@cluster.2018.3.222 +mkl+mpi+ipp+tbb+daal"
         prefix: /opt/intel
       buildable: False

Note that for the Intel packages discussed here, the directory values in the
``paths:`` entries must be the high-level and typically version-less
``prefix:`` entries must be the high-level and typically version-less
"installation directory" that has been used by Intel's product installer.
Such a directory will typically accumulate various product versions. Amongst
them, Spack will select the correct version-specific product directory based on

84
lib/spack/docs/build_systems/mavenpackage.rst
Normal file
@@ -0,0 +1,84 @@
.. Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _mavenpackage:

------------
MavenPackage
------------

Apache Maven is a general-purpose build system that does not rely
on Makefiles to build software. It is designed for building and
managing Java-based projects.

^^^^^^
Phases
^^^^^^

The ``MavenPackage`` base class comes with the following phases:

#. ``build`` - compile code and package into a JAR file
#. ``install`` - copy to installation prefix

By default, these phases run:

.. code-block:: console

   $ mvn package
   $ install . <prefix>
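
If the default ``mvn package`` invocation is not sufficient, the phases
can be overridden in the usual Spack way. The following is a minimal
sketch, not a recipe from the Spack repository; the ``-DskipTests``
flag is a standard Maven option, used here purely as an illustration:

.. code-block:: python

   class Example(MavenPackage):
       """Hypothetical package that skips the test suite during build."""

       def build(self, spec, prefix):
           # Build the JAR without running the project's own tests
           mvn = which('mvn')
           mvn('package', '-DskipTests')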

^^^^^^^^^^^^^^^
Important files
^^^^^^^^^^^^^^^

Maven packages can be identified by the presence of a ``pom.xml`` file.
This file lists dependencies and other metadata about the project.
There may also be configuration files in the ``.mvn`` directory.

^^^^^^^^^^^^^^^^^^^^^^^^^
Build system dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^

Maven requires the ``mvn`` executable to build the project. It also
requires Java at both build- and run-time. Because of this, the base
class automatically adds the following dependencies:

.. code-block:: python

   depends_on('java', type=('build', 'run'))
   depends_on('maven', type='build')

In the ``pom.xml`` file, you may see sections like:

.. code-block:: xml

   <requireJavaVersion>
     <version>[1.7,)</version>
   </requireJavaVersion>
   <requireMavenVersion>
     <version>[3.5.4,)</version>
   </requireMavenVersion>

This specifies the versions of Java and Maven that are required to
build the package. See
https://docs.oracle.com/middleware/1212/core/MAVEN/maven_version.htm#MAVEN402
for a description of this version range syntax. In this case, you
should add:

.. code-block:: python

   depends_on('java@7:', type='build')
   depends_on('maven@3.5.4:', type='build')
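
Putting this together, a minimal recipe might look like the sketch
below. The package name, URL, and checksum are placeholders (this is
not a real package in the Spack repository):

.. code-block:: python

   class ExampleJavaApp(MavenPackage):
       """Sketch of a recipe for a hypothetical Maven-built project."""

       homepage = "https://example.com"
       url      = "https://example.com/example-java-app-1.0.0.tar.gz"

       # Placeholder checksum; fill in with ``spack checksum``
       version('1.0.0', sha256='0000000000000000000000000000000000000000000000000000000000000000')

       # Constraints derived from the pom.xml snippet above
       depends_on('java@7:', type='build')
       depends_on('maven@3.5.4:', type='build')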

^^^^^^^^^^^^^^^^^^^^^^
External documentation
^^^^^^^^^^^^^^^^^^^^^^

For more information on the Maven build system, see:
https://maven.apache.org/index.html

@@ -81,6 +81,24 @@ you'll need to define a function for it like so:
       self.setup_py('configure')


^^^^^^
Wheels
^^^^^^

Some Python packages are closed-source and distributed as wheels.
Instead of using the ``PythonPackage`` base class, you should extend
the ``Package`` base class and implement the following custom installation
procedure:

.. code-block:: python

   def install(self, spec, prefix):
       pip = which('pip')
       pip('install', self.stage.archive_file, '--prefix={0}'.format(prefix))

This will require a dependency on pip, as mentioned below.

^^^^^^^^^^^^^^^
Important files
^^^^^^^^^^^^^^^
@@ -95,6 +113,27 @@ file should be considered to be the truth. As dependencies are added or
removed, the documentation is much more likely to become outdated than
the ``setup.py``.

The Python ecosystem has evolved significantly over the years. Before
setuptools became popular, most packages listed their dependencies in a
``requirements.txt`` file. Once setuptools took over, these dependencies
were listed directly in the ``setup.py``. Newer PEPs introduced additional
files, like ``setup.cfg`` and ``pyproject.toml``. You should look out for
all of these files, as they may all contain important information about
package dependencies.

Some Python packages are closed-source and are distributed as Python
wheels. For example, ``py-azureml-sdk`` downloads a ``.whl`` file. This
file is simply a zip file, and can be extracted using:

.. code-block:: console

   $ unzip *.whl

The zip file will not contain a ``setup.py``, but it will contain a
``METADATA`` file which contains all the information you need to
write a ``package.py`` build recipe.

^^^^^^^^^^^^^^^^^^^^^^^
Finding Python packages
^^^^^^^^^^^^^^^^^^^^^^^
@@ -105,8 +144,9 @@ it the only option for developers who want a simple installation.
Search for "PyPI <package-name>" to find the download page. Note that
some pages are versioned, and the first result may not be the newest
version. Click on the "Latest Version" button to the top right to see
if a newer version is available. The download page is usually at:
https://pypi.org/project/<package-name>
if a newer version is available. The download page is usually at::

   https://pypi.org/project/<package-name>

^^^^^^^^^^^
Description
@@ -151,39 +191,67 @@ replacing this with the requested version. Obviously, if Spack cannot
guess the version correctly, or if non-version-related things change
in the URL, Spack cannot substitute the version properly.

Once upon a time, PyPI offered nice, simple download URLs like:
https://pypi.python.org/packages/source/n/numpy/numpy-1.13.1.zip
Once upon a time, PyPI offered nice, simple download URLs like::

   https://pypi.python.org/packages/source/n/numpy/numpy-1.13.1.zip

As you can see, the version is 1.13.1. It probably isn't hard to guess
what URL to use to download version 1.12.0, and Spack was perfectly
capable of performing this calculation.

However, PyPI switched to a new download URL format:
https://pypi.python.org/packages/c0/3a/40967d9f5675fbb097ffec170f59c2ba19fc96373e73ad47c2cae9a30aed/numpy-1.13.1.zip#md5=2c3c0f4edf720c3a7b525dacc825b9ae
However, PyPI switched to a new download URL format::

   https://pypi.python.org/packages/c0/3a/40967d9f5675fbb097ffec170f59c2ba19fc96373e73ad47c2cae9a30aed/numpy-1.13.1.zip#md5=2c3c0f4edf720c3a7b525dacc825b9ae

and more recently::

   https://files.pythonhosted.org/packages/b0/2b/497c2bb7c660b2606d4a96e2035e92554429e139c6c71cdff67af66b58d2/numpy-1.14.3.zip

and more recently:
https://files.pythonhosted.org/packages/b0/2b/497c2bb7c660b2606d4a96e2035e92554429e139c6c71cdff67af66b58d2/numpy-1.14.3.zip

As you can imagine, it is impossible for Spack to guess what URL to
use to download version 1.12.0 given this URL. There is a solution,
however. PyPI offers a new hidden interface for downloading
Python packages that does not include a hash in the URL:
https://pypi.io/packages/source/n/numpy/numpy-1.13.1.zip
Python packages that does not include a hash in the URL::

This URL redirects to the files.pythonhosted.org URL. The general syntax for
this pypi.io URL is:
https://pypi.io/packages/source/<first-letter-of-name>/<name>/<name>-<version>.<extension>
   https://pypi.io/packages/source/n/numpy/numpy-1.13.1.zip

This URL redirects to the https://files.pythonhosted.org URL. The general
syntax for this https://pypi.io URL is::

   https://pypi.io/packages/<type>/<first-letter-of-name>/<name>/<name>-<version>.<extension>
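
For example, a ``package.py`` can point its ``url`` at this interface
directly. This is a simplified sketch (the checksum is a placeholder),
not the actual ``py-numpy`` recipe:

.. code-block:: python

   class PyNumpy(PythonPackage):
       """Sketch: fetching a PyPI source tarball through pypi.io."""

       homepage = "https://numpy.org"
       url      = "https://pypi.io/packages/source/n/numpy/numpy-1.13.1.zip"

       # Placeholder checksum; fill in with ``spack checksum``
       version('1.13.1', sha256='0000000000000000000000000000000000000000000000000000000000000000')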

Please use the https://pypi.io URL instead of the https://pypi.python.org
URL. If both ``.tar.gz`` and ``.zip`` versions are available, ``.tar.gz``
is preferred. If some releases offer both ``.tar.gz`` and ``.zip`` versions,
but some only offer ``.zip`` versions, use ``.zip``.

Some Python packages are closed-source and do not ship ``.tar.gz`` or ``.zip``
files on either PyPI or GitHub. If this is the case, you can still download
and install a Python wheel. For example, ``py-azureml-sdk`` is closed source
and can be downloaded from::

   https://pypi.io/packages/py3/a/azureml_sdk/azureml_sdk-1.11.0-py3-none-any.whl

Note that instead of ``<type>`` being ``source``, it is now ``py3`` since this
wheel will work for any generic version of Python 3. You may see Python-specific
or OS-specific URLs. Note that when you add a ``.whl`` URL, you should add
``expand=False`` to ensure that Spack doesn't try to extract the wheel:

.. code-block:: python

   version('1.11.0', sha256='d8c9d24ea90457214d798b0d922489863dad518adde3638e08ef62de28fb183a', expand=False)

Please use the pypi.io URL instead of the pypi.python.org URL. If both
``.tar.gz`` and ``.zip`` versions are available, ``.tar.gz`` is preferred.
If some releases offer both ``.tar.gz`` and ``.zip`` versions, but some
only offer ``.zip`` versions, use ``.zip``.

"""""""""""""""
PyPI vs. GitHub
"""""""""""""""

Many packages are hosted on PyPI, but are developed on GitHub and other
Many packages are hosted on PyPI, but are developed on GitHub or another
version control systems. The tarball can be downloaded from either
location, but PyPI is preferred for the following reasons:

@@ -226,7 +294,7 @@ location, but PyPI is preferred for the following reasons:

There are some reasons to prefer downloading from GitHub:

#. The GitHub tarball may contain unit tests
#. The GitHub tarball may contain unit tests.

As previously mentioned, the PyPI tarball contains the bare minimum
of files to install the package. Unless explicitly specified by the

@@ -234,12 +302,6 @@ There are some reasons to prefer downloading from GitHub:
If you desire to run the unit tests during installation, you should
use the GitHub tarball instead.

#. Spack does not yet support ``spack versions`` and ``spack checksum``
   with PyPI URLs

   These commands work just fine with GitHub URLs. This is a minor
   annoyance, not a reason to prefer GitHub over PyPI.

If you really want to run these unit tests, no one will stop you from
submitting a PR for a new package that downloads from GitHub.

@@ -280,8 +342,8 @@ If Python 2.7 is the only version that works, you can use:

The documentation may not always specify supported Python versions.
Another place to check is in the ``setup.py`` file. Look for a line
containing ``python_requires``. An example from
Another place to check is in the ``setup.py`` or ``setup.cfg`` file.
Look for a line containing ``python_requires``. An example from
`py-numpy <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-numpy/package.py>`_
looks like:

@@ -290,7 +352,7 @@ looks like:
   python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*'

More commonly, you will find a version check at the top of the file:
You may also find a version check at the top of the ``setup.py``:

.. code-block:: python

@@ -305,6 +367,39 @@ This can be converted to Spack's spec notation like so:
   depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))

If you are writing a recipe for a package that only distributes
wheels, look for a section in the ``METADATA`` file that looks like::

   Requires-Python: >=3.5,<4

This would be translated to:

.. code-block:: python

   extends('python')
   depends_on('python@3.5:3.999', type=('build', 'run'))

Many ``setup.py`` or ``setup.cfg`` files also contain information like::

   Programming Language :: Python :: 2
   Programming Language :: Python :: 2.6
   Programming Language :: Python :: 2.7
   Programming Language :: Python :: 3
   Programming Language :: Python :: 3.3
   Programming Language :: Python :: 3.4
   Programming Language :: Python :: 3.5
   Programming Language :: Python :: 3.6

This is a list of versions of Python that the developer likely tests.
However, you should not use this to restrict the versions of Python
the package uses unless one of the two former methods (``python_requires``
or ``sys.version_info``) is used. There is no logic in setuptools
that prevents the package from building for Python versions not in
this list, and often new releases like Python 3.7 or 3.8 work just fine.

""""""""""
setuptools
""""""""""
@@ -317,7 +412,7 @@ Most notably, there was no way to list a project's dependencies
with distutils. Along came setuptools, a non-builtin build system
designed to overcome the limitations of distutils. Both projects
use a similar API, making the transition easy while adding much
needed functionality. Today, setuptools is used in around 75% of
needed functionality. Today, setuptools is used in around 90% of
the Python packages in Spack.

Since setuptools isn't built-in to Python, you need to add it as a
@@ -360,6 +455,20 @@ run-time. This can be specified as:
   depends_on('py-setuptools', type='build')

"""
pip
"""

Packages distributed as Python wheels will require an extra dependency
on pip:

.. code-block:: python

   depends_on('py-pip', type='build')

We will use pip to install the actual wheel.

""""""
cython
""""""
@@ -383,6 +492,12 @@ where speed is crucial. There is no reason why someone would not
want an optimized version of a library instead of the pure-Python
version.

Note that some release tarballs come pre-cythonized, and cython is
not needed as a dependency. However, this is becoming less common
as Python continues to evolve and developers discover that cythonized
sources are no longer compatible with newer versions of Python and
need to be re-cythonized.

^^^^^^^^^^^^^^^^^^^
Python dependencies
^^^^^^^^^^^^^^^^^^^
@@ -429,15 +544,26 @@ Obviously, this means that ``py-numpy`` is a dependency.

If the package uses ``setuptools``, check for the following clues:

* ``python_requires``

  As mentioned above, this specifies which versions of Python are
  required.

* ``setup_requires``

  These packages are usually only needed at build-time, so you can
  add them with ``type='build'``.

* ``install_requires``

  These packages are required for installation.
  These packages are required for building and installation. You can
  add them with ``type=('build', 'run')``.

* ``extra_requires``

  These packages are optional dependencies that enable additional
  functionality. You should add a variant that optionally adds these
  dependencies.
  dependencies. This variant should be False by default.

* ``test_requires``

@@ -461,13 +587,37 @@ sphinx. If you can't find any information about the package's
dependencies, you can take a look in ``requirements.txt``, but be sure
not to add test or documentation dependencies.

Newer PEPs have added alternative ways to specify a package's dependencies.
If you don't see any dependencies listed in the ``setup.py``, look for a
``setup.cfg`` or ``pyproject.toml``. These files can be used to store the
same ``install_requires`` information that ``setup.py`` used to use.

If you are writing a recipe for a package that only distributes wheels,
check the ``METADATA`` file for lines like::

   Requires-Dist: azureml-core (~=1.11.0)
   Requires-Dist: azureml-dataset-runtime[fuse] (~=1.11.0)
   Requires-Dist: azureml-train (~=1.11.0)
   Requires-Dist: azureml-train-automl-client (~=1.11.0)
   Requires-Dist: azureml-pipeline (~=1.11.0)
   Provides-Extra: accel-models
   Requires-Dist: azureml-accel-models (~=1.11.0); extra == 'accel-models'
   Provides-Extra: automl
   Requires-Dist: azureml-train-automl (~=1.11.0); extra == 'automl'

Lines that use ``Requires-Dist`` are similar to ``install_requires``.
Lines that use ``Provides-Extra`` are similar to ``extra_requires``,
and you can add a variant for those dependencies. The ``~=1.11.0``
syntax is equivalent to ``1.11.0:1.11.999``.
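
As a hedged sketch, the ``METADATA`` lines above might translate into
directives like the following. The variant name is our own choice, and
the ``py-`` package names assume that corresponding Spack packages
exist:

.. code-block:: python

   # Requires-Dist lines become unconditional dependencies
   depends_on('py-azureml-core@1.11.0:1.11.999', type=('build', 'run'))

   # Provides-Extra lines become optional, variant-gated dependencies
   variant('automl', default=False, description='Enable AutoML support')
   depends_on('py-azureml-train-automl@1.11.0:1.11.999',
              when='+automl', type=('build', 'run'))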

""""""""""
setuptools
""""""""""

Setuptools is a bit of a special case. If a package requires setuptools
at run-time, how do they express this? They could add it to
``install_requires``, but setuptools is imported long before this and
``install_requires``, but setuptools is imported long before this and is
needed to read this line. And since you can't install the package
without setuptools, the developers assume that setuptools will already
be there, so they never mention when it is required. We don't want to

@@ -580,11 +730,13 @@ By default, Spack runs:

if it detects that the ``setup.py`` file supports a ``test`` phase.
You can add additional build-time or install-time tests by overriding
``test`` and ``installtest``, respectively. For example, ``py-numpy``
adds:
``test`` or adding a custom install-time test function. For example,
``py-numpy`` adds:

.. code-block:: python

   install_time_test_callbacks = ['install_test', 'import_module_test']

   def install_test(self):
       with working_dir('..'):
           python('-c', 'import numpy; numpy.test("full", verbose=2)')
@@ -651,6 +803,8 @@ that the package uses the ``PythonPackage`` build system. However, there
are occasionally packages that use ``PythonPackage`` that shouldn't
start with ``py-``. For example:

* awscli
* aws-parallelcluster
* busco
* easybuild
* httpie

@@ -736,8 +890,9 @@ non-Python dependencies. Anaconda contains many Python packages that
are not yet in Spack, and Spack contains many Python packages that are
not yet in Anaconda. The main advantage of Spack over Anaconda is its
ability to choose a specific compiler and BLAS/LAPACK or MPI library.
Spack also has better platform support for supercomputers. On the
other hand, Anaconda offers Windows support.
Spack also has better platform support for supercomputers, and can build
optimized binaries for your specific microarchitecture. On the other hand,
Anaconda offers Windows support.

^^^^^^^^^^^^^^^^^^^^^^
External documentation

@@ -12,5 +12,173 @@ RubyPackage
Like Perl, Python, and R, Ruby has its own build system for
installing Ruby gems.

This build system is a work-in-progress. See
https://github.com/spack/spack/pull/3127 for more information.
^^^^^^
Phases
^^^^^^

The ``RubyPackage`` base class provides the following phases that
can be overridden:

#. ``build`` - build everything needed to install
#. ``install`` - install everything from build directory

For packages that come with a ``*.gemspec`` file, these phases run:

.. code-block:: console

   $ gem build *.gemspec
   $ gem install *.gem

For packages that come with a ``Rakefile`` file, these phases run:

.. code-block:: console

   $ rake package
   $ gem install *.gem

For packages that come pre-packaged as a ``*.gem`` file, the build
phase is skipped and the install phase runs:

.. code-block:: console

   $ gem install *.gem

These are all standard ``gem`` commands and can be found by running:

.. code-block:: console

   $ gem help commands

For packages that only distribute ``*.gem`` files, these files can be
downloaded with the ``expand=False`` option in the ``version`` directive.
The build phase will be automatically skipped.
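
For instance, such a version directive might look like the following
sketch, where the URL and checksum are placeholders rather than a real
gem:

.. code-block:: python

   # Download the pre-built *.gem file without extracting it
   version('2.0.10', sha256='0000000000000000000000000000000000000000000000000000000000000000',
           expand=False,
           url='https://rubygems.org/downloads/example-2.0.10.gem')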

^^^^^^^^^^^^^^^
Important files
^^^^^^^^^^^^^^^

When building from source, Ruby packages can be identified by the
presence of any of the following files:

* ``*.gemspec``
* ``Rakefile``
* ``setup.rb`` (not yet supported)

However, not all Ruby packages are released as source code. Some are only
released as ``*.gem`` files. These files can be extracted using:

.. code-block:: console

   $ gem unpack *.gem

^^^^^^^^^^^
Description
^^^^^^^^^^^

The ``*.gemspec`` file may contain something like:

.. code-block:: ruby

   summary = 'An implementation of the AsciiDoc text processor and publishing toolchain'
   description = 'A fast, open source text processor and publishing toolchain for converting AsciiDoc content to HTML 5, DocBook 5, and other formats.'

Either of these can be used for the description of the Spack package.

^^^^^^^^
Homepage
^^^^^^^^

The ``*.gemspec`` file may contain something like:

.. code-block:: ruby

   homepage = 'https://asciidoctor.org'

This should be used as the official homepage of the Spack package.

^^^^^^^^^^^^^^^^^^^^^^^^^
Build system dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^

All Ruby packages require Ruby at build and run-time. For this reason,
the base class contains:

.. code-block:: python

   extends('ruby')
   depends_on('ruby', type=('build', 'run'))

The ``*.gemspec`` file may contain something like:

.. code-block:: ruby

   required_ruby_version = '>= 2.3.0'

This can be added to the Spack package using:

.. code-block:: python

   depends_on('ruby@2.3.0:', type=('build', 'run'))

^^^^^^^^^^^^^^^^^
Ruby dependencies
^^^^^^^^^^^^^^^^^

When you install a package with ``gem``, it reads the ``*.gemspec``
file in order to determine the dependencies of the package.
If the dependencies are not yet installed, ``gem`` downloads them
and installs them for you. This may sound convenient, but Spack
cannot rely on this behavior for two reasons:

#. Spack needs to be able to install packages on air-gapped networks.

   If there is no internet connection, ``gem`` can't download the
   package dependencies. By explicitly listing every dependency in
   the ``package.py``, Spack knows what to download ahead of time.

#. Duplicate installations of the same dependency may occur.

   Spack supports *activation* of Ruby extensions, which involves
   symlinking the package installation prefix to the Ruby installation
   prefix. If your package is missing a dependency, that dependency
   will be installed to the installation directory of the same package.
   If you try to activate the package + dependency, it may cause a
   problem if that package has already been activated.

For these reasons, you must always explicitly list all dependencies.
Although the documentation may list the package's dependencies,
often the developers assume people will use ``gem`` and won't have to
worry about it. Always check the ``*.gemspec`` file to find the true
dependencies.

Check for the following clues in the ``*.gemspec`` file:

* ``add_runtime_dependency``

  These packages are required for installation.

* ``add_dependency``

  This is an alias for ``add_runtime_dependency``.

* ``add_development_dependency``

  These packages are optional dependencies used for development.
  They should not be added as dependencies of the package.
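
As a hedged example, a gemspec line such as
``spec.add_runtime_dependency 'asciidoctor', '~> 2.0'`` might translate
into the following directive, assuming a ``ruby-asciidoctor`` package
exists in Spack (the ``~>`` pessimistic constraint pins the major
version):

.. code-block:: python

   depends_on('ruby-asciidoctor@2.0:2.999', type=('build', 'run'))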

^^^^^^^^^^^^^^^^^^^^^^
External documentation
^^^^^^^^^^^^^^^^^^^^^^

For more information on Ruby packaging, see:
https://guides.rubygems.org/

@@ -56,7 +56,7 @@ overridden like so:

.. code-block:: python

   def test(self):
   def build_test(self):
       scons('check')


@@ -99,7 +99,7 @@ username is not already in the path, Spack will append the value of ``$user`` to
the selected ``build_stage`` path.

.. warning:: We highly recommend specifying ``build_stage`` paths that
   distinguish between staging and other activities to ensure
   ``spack clean`` does not inadvertently remove unrelated files.
   Spack prepends ``spack-stage-`` to temporary staging directory names to
   reduce this risk. Using a combination of ``spack`` and/or ``stage`` in
@@ -223,7 +223,7 @@ To build all software in serial, set ``build_jobs`` to 1.
--------------------

When set to ``true``, Spack will use ccache to cache compiles. This is
useful specifically in two cases: (1) when using ``spack setup``, and (2)
useful specifically in two cases: (1) when using ``spack dev-build``, and (2)
when building the same package with many different variants. The default is
``false``.

@@ -45,7 +45,7 @@ Environments:
   && echo " view: /opt/view") > /opt/spack-environment/spack.yaml

   # Install the software, remove unnecessary deps
   RUN cd /opt/spack-environment && spack install && spack gc -y
   RUN cd /opt/spack-environment && spack env activate . && spack install && spack gc -y

   # Strip all the binaries
   RUN find -L /opt/view/* -type f -exec readlink -f '{}' \; | \

@@ -71,7 +71,7 @@ Environments:
   && yum install -y libgomp \
   && rm -rf /var/cache/yum && yum clean all

   RUN echo 'export PS1="\[$(tput bold)\]\[$(tput setaf 1)\][gromacs]\[$(tput setaf 2)\]\u\[$(tput sgr0)\]:\w $ \[$(tput sgr0)\]"' >> ~/.bashrc
   RUN echo 'export PS1="\[$(tput bold)\]\[$(tput setaf 1)\][gromacs]\[$(tput setaf 2)\]\u\[$(tput sgr0)\]:\w $ "' >> ~/.bashrc

   LABEL "app"="gromacs"

@@ -165,7 +165,7 @@ of environments:
   # Extra instructions
   extra_instructions:
     final: |
       RUN echo 'export PS1="\[$(tput bold)\]\[$(tput setaf 1)\][gromacs]\[$(tput setaf 2)\]\u\[$(tput sgr0)\]:\w $ \[$(tput sgr0)\]"' >> ~/.bashrc
       RUN echo 'export PS1="\[$(tput bold)\]\[$(tput setaf 1)\][gromacs]\[$(tput setaf 2)\]\u\[$(tput sgr0)\]:\w $ "' >> ~/.bashrc

   # Labels for the image
   labels:

@@ -267,7 +267,7 @@ following ``Dockerfile``:
   && echo " view: /opt/view") > /opt/spack-environment/spack.yaml

   # Install the software, remove unnecessary deps
   RUN cd /opt/spack-environment && spack install && spack gc -y
   RUN cd /opt/spack-environment && spack env activate . && spack install && spack gc -y

   # Strip all the binaries
   RUN find -L /opt/view/* -type f -exec readlink -f '{}' \; | \

@@ -293,7 +293,7 @@ following ``Dockerfile``:
   && yum install -y libgomp \
   && rm -rf /var/cache/yum && yum clean all

   RUN echo 'export PS1="\[$(tput bold)\]\[$(tput setaf 1)\][gromacs]\[$(tput setaf 2)\]\u\[$(tput sgr0)\]:\w $ \[$(tput sgr0)\]"' >> ~/.bashrc
   RUN echo 'export PS1="\[$(tput bold)\]\[$(tput setaf 1)\][gromacs]\[$(tput setaf 2)\]\u\[$(tput sgr0)\]:\w $ "' >> ~/.bashrc

   LABEL "app"="gromacs"

@@ -27,17 +27,28 @@ correspond to one feature/bugfix/extension/etc. One can create PRs with
changes relevant to different ideas, however reviewing such PRs becomes tedious
and error prone. If possible, try to follow the **one-PR-one-package/feature** rule.

Spack uses a rough approximation of the `Git Flow <http://nvie.com/posts/a-successful-git-branching-model/>`_
branching model. The develop branch contains the latest contributions, and
master is always tagged and points to the latest stable release. Therefore, when
you send your request, make ``develop`` the destination branch on the
`Spack repository <https://github.com/spack/spack>`_.
--------
Branches
--------

Spack's ``develop`` branch has the latest contributions. Nearly all pull
requests should start from ``develop`` and target ``develop``.

There is a branch for each major release series. Release branches
originate from ``develop`` and have tags for each point release in the
series. For example, ``releases/v0.14`` has tags for ``0.14.0``,
``0.14.1``, ``0.14.2``, etc. versions of Spack. We backport important bug
fixes to these branches, but we do not advance the package versions or
make other changes that would change the way Spack concretizes
dependencies. Currently, the maintainers manage these branches by
cherry-picking from ``develop``. See :ref:`releases` for more
information.

----------------------
Continuous Integration
----------------------

Spack uses `Travis CI <https://travis-ci.org/spack/spack>`_ for Continuous Integration
Spack uses `Github Actions <https://docs.github.com/en/actions>`_ for Continuous Integration
testing. This means that every time you submit a pull request, a series of tests will
be run to make sure you didn't accidentally introduce any bugs into Spack. **Your PR
will not be accepted until it passes all of these tests.** While you can certainly wait
@@ -46,25 +57,24 @@ locally to speed up the review process.

.. note::

   Oftentimes, Travis will fail for reasons other than a problem with your PR.
   Oftentimes, CI will fail for reasons other than a problem with your PR.
   For example, apt-get, pip, or homebrew will fail to download one of the
   dependencies for the test suite, or a transient bug will cause the unit tests
   to timeout. If Travis fails, click the "Details" link and click on the test(s)
   to timeout. If any job fails, click the "Details" link and click on the test(s)
   that are failing. If it doesn't look like it is failing for reasons related to
   your PR, you have two options. If you have write permissions for the Spack
   repository, you should see a "Restart job" button on the right-hand side. If
   repository, you should see a "Restart workflow" button on the right-hand side. If
   not, you can close and reopen your PR to rerun all of the tests. If the same
   test keeps failing, there may be a problem with your PR. If you notice that
   every recent PR is failing with the same error message, it may be that Travis
   is down or one of Spack's dependencies put out a new release that is causing
   problems. If this is the case, please file an issue.
   every recent PR is failing with the same error message, it may be that an issue
   occurred with the CI infrastructure or one of Spack's dependencies put out a
   new release that is causing problems. If this is the case, please file an issue.

If you take a look in ``$SPACK_ROOT/.travis.yml``, you'll notice that we test
against Python 2.6, 2.7, and 3.4-3.7 on both macOS and Linux. We currently
We currently test against Python 2.6, 2.7, and 3.5-3.7 on both macOS and Linux and
perform 3 types of tests:

.. _cmd-spack-test:
.. _cmd-spack-unit-test:

^^^^^^^^^^
Unit Tests
@@ -86,7 +96,7 @@ To run *all* of the unit tests, use:

.. code-block:: console

   $ spack test
   $ spack unit-test

These tests may take several minutes to complete. If you know you are
only modifying a single Spack feature, you can run subsets of tests at a
@@ -95,51 +105,53 @@ time. For example, this would run all the tests in

.. code-block:: console

   $ spack test architecture.py
   $ spack unit-test lib/spack/spack/test/architecture.py

And this would run the ``test_platform`` test from that file:

.. code-block:: console

   $ spack test architecture.py::test_platform
   $ spack unit-test lib/spack/spack/test/architecture.py::test_platform

This allows you to develop iteratively: make a change, test that change,
make another change, test that change, etc. We use `pytest
<http://pytest.org/>`_ as our tests fromework, and these types of
<http://pytest.org/>`_ as our test framework, and these types of
arguments are just passed to the ``pytest`` command underneath. See `the
pytest docs
<http://doc.pytest.org/en/latest/usage.html#specifying-tests-selecting-tests>`_
for more details on test selection syntax.

``spack test`` has a few special options that can help you understand
what tests are available. To get a list of all available unit test
files, run:
``spack unit-test`` has a few special options that can help you
understand what tests are available. To get a list of all available
unit test files, run:

.. command-output:: spack test --list
.. command-output:: spack unit-test --list
   :ellipsis: 5

To see a more detailed list of available unit tests, use ``spack test
--list-long``:
To see a more detailed list of available unit tests, use ``spack
unit-test --list-long``:

.. command-output:: spack test --list-long
.. command-output:: spack unit-test --list-long
   :ellipsis: 10

And to see the fully qualified names of all tests, use ``--list-names``:

.. command-output:: spack test --list-names
.. command-output:: spack unit-test --list-names
   :ellipsis: 5

You can combine these with ``pytest`` arguments to restrict which tests
you want to know about. For example, to see just the tests in
``architecture.py``:

.. command-output:: spack test --list-long architecture.py
.. command-output:: spack unit-test --list-long lib/spack/spack/test/architecture.py

You can also combine any of these options with a ``pytest`` keyword
search. For example, to see the names of all tests that have "spec"
search. See the `pytest usage docs
<https://docs.pytest.org/en/stable/usage.html#specifying-tests-selecting-tests>`_
for more details on test selection syntax. For example, to see the names of all tests that have "spec"
or "concretize" somewhere in their names:

.. command-output:: spack test --list-names -k "spec and concretize"
.. command-output:: spack unit-test --list-names -k "spec and concretize"

By default, ``pytest`` captures the output of all unit tests, and it will
print any captured output for failed tests. Sometimes it's helpful to see
@@ -149,7 +161,7 @@ argument to ``pytest``:

.. code-block:: console

   $ spack test -s architecture.py::test_platform
   $ spack unit-test -s --list-long lib/spack/spack/test/architecture.py::test_platform

Unit tests are crucial to making sure bugs aren't introduced into
Spack. If you are modifying core Spack libraries or adding new
@@ -162,9 +174,9 @@ how to write tests!
.. note::

   You may notice the ``share/spack/qa/run-unit-tests`` script in the
   repository. This script is designed for Travis CI. It runs the unit
   repository. This script is designed for CI. It runs the unit
   tests and reports coverage statistics back to Codecov. If you want to
   run the unit tests yourself, we suggest you use ``spack test``.
   run the unit tests yourself, we suggest you use ``spack unit-test``.

^^^^^^^^^^^^
Flake8 Tests
@@ -235,7 +247,7 @@ to update them.

Try fixing flake8 errors in reverse order. This eliminates the need for
multiple runs of ``spack flake8`` just to re-compute line numbers and
makes it much easier to fix errors directly off of the Travis output.
makes it much easier to fix errors directly off of the CI output.

.. warning::

@@ -315,7 +327,7 @@ Once all of the dependencies are installed, you can try building the documentation:

.. code-block:: console

   $ cd "$SPACK_ROOT/lib/spack/docs"
   $ cd path/to/spack/lib/spack/docs/
   $ make clean
   $ make

@@ -327,7 +339,7 @@ your PR is accepted.
There is also a ``run-doc-tests`` script in ``share/spack/qa``. The only
difference between running this script and running ``make`` by hand is that
the script will exit immediately if it encounters an error or warning. This
is necessary for Travis CI. If you made a lot of documentation changes, it is
is necessary for CI. If you made a lot of documentation changes, it is
much quicker to run ``make`` by hand so that you can see all of the warnings
at once.

@@ -391,7 +403,7 @@ and allow you to see coverage line-by-line when viewing the Spack repository.
If you are new to Spack, a great way to get started is to write unit tests to
increase coverage!

Unlike with Travis, Codecov tests are not required to pass in order for your
Unlike with CI on Github Actions, Codecov tests are not required to pass in order for your
PR to be merged. If you modify core Spack libraries, we would greatly
appreciate unit tests that cover these changed lines. Otherwise, we have no
way of knowing whether or not your changes introduce a bug. If you make

@@ -363,11 +363,12 @@ Developer commands
``spack doc``
^^^^^^^^^^^^^

^^^^^^^^^^^^^^
``spack test``
^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^
``spack unit-test``
^^^^^^^^^^^^^^^^^^^

See the :ref:`contributor guide section <cmd-spack-test>` on ``spack test``.
See the :ref:`contributor guide section <cmd-spack-unit-test>` on
``spack unit-test``.

.. _cmd-spack-python:

@@ -495,3 +496,398 @@ The bottom of the output shows the top most time consuming functions,
slowest on top. The profiling support is from Python's built-in tool,
`cProfile
<https://docs.python.org/2/library/profile.html#module-cProfile>`_.

.. _releases:

--------
Releases
--------

This section documents Spack's release process. It is intended for
project maintainers, as the tasks described here require maintainer
privileges on the Spack repository. For others, we hope this section at
least provides some insight into how the Spack project works.

.. _release-branches:

^^^^^^^^^^^^^^^^
Release branches
^^^^^^^^^^^^^^^^

There are currently two types of Spack releases: :ref:`major releases
<major-releases>` (``0.13.0``, ``0.14.0``, etc.) and :ref:`point releases
<point-releases>` (``0.13.1``, ``0.13.2``, ``0.13.3``, etc.). Here is a
diagram of how Spack release branches work::

   o    branch: develop  (latest version)
   |
   o    merge v0.14.1 into develop
   |\
   | o  branch: releases/v0.14, tag: v0.14.1
   o |  merge v0.14.0 into develop
   |\|
   | o  tag: v0.14.0
   |/
   o    merge v0.13.2 into develop
   |\
   | o  branch: releases/v0.13, tag: v0.13.2
   o |  merge v0.13.1 into develop
   |\|
   | o  tag: v0.13.1
   o |  merge v0.13.0 into develop
   |\|
   | o  tag: v0.13.0
   o |
   | o
   |/
   o

The ``develop`` branch has the latest contributions, and nearly all pull
requests target ``develop``.

Each Spack release series also has a corresponding branch, e.g.
``releases/v0.14`` has ``0.14.x`` versions of Spack, and
``releases/v0.13`` has ``0.13.x`` versions. A major release is the first
tagged version on a release branch. Minor releases are back-ported from
develop onto release branches. This is typically done by cherry-picking
bugfix commits off of ``develop``.

To avoid version churn for users of a release series, minor releases
should **not** make changes that would change the concretization of
packages. They should generally only contain fixes to the Spack core.

Both major and minor releases are tagged. After each release, we merge
the release branch back into ``develop`` so that the version bump and any
other release-specific changes are visible in the mainline. As a
convenience, we also tag the latest release as ``releases/latest``,
so that users can easily check it out to get the latest
stable version. See :ref:`merging-releases` for more details.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Scheduling work for releases
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

We schedule work for releases by creating `GitHub projects
<https://github.com/spack/spack/projects>`_. At any time, there may be
several open release projects. For example, here are two releases (from
some past version of the page linked above):

.. image:: images/projects.png

Here, there's one release in progress for ``0.15.1`` and another for
``0.16.0``. Each of these releases has a project board containing issues
and pull requests. GitHub shows a status bar with completed work in
green, work in progress in purple, and work not started yet in gray, so
it's fairly easy to see progress.

Spack's project boards are not firm commitments, and we move work between
releases frequently. If we need to make a release and some tasks are not
yet done, we will simply move them to the next minor or major release, rather
than delaying the release to complete them.

For more on using GitHub project boards, see `GitHub's documentation
<https://docs.github.com/en/github/managing-your-work-on-github/about-project-boards>`_.

.. _major-releases:

^^^^^^^^^^^^^^^^^^^^^
Making Major Releases
^^^^^^^^^^^^^^^^^^^^^

Assuming you've already created a project board and completed the work
for a major release, the steps to make the release are as follows:

#. Create two new project boards:

   * One for the next major release
   * One for the next point release

#. Move any tasks that aren't done yet to one of the new project boards.
   Small bugfixes should go to the next point release. Major features,
   refactors, and changes that could affect concretization should go in
   the next major release.

#. Create a branch for the release, based on ``develop``:

   .. code-block:: console

      $ git checkout -b releases/v0.15 develop

   For a version ``vX.Y.Z``, the branch's name should be
   ``releases/vX.Y``. That is, you should create a ``releases/vX.Y``
   branch if you are preparing the ``X.Y.0`` release.

#. Bump the version in ``lib/spack/spack/__init__.py``. See `this example from 0.13.0
   <https://github.com/spack/spack/commit/8eeb64096c98b8a43d1c587f13ece743c864fba9>`_.

#. Update the release version lists in these files to include the new version:

   * ``lib/spack/spack/schema/container.py``
   * ``lib/spack/spack/container/images.json``

   .. TODO: We should get rid of this step in some future release.

#. Update ``CHANGELOG.md`` with major highlights in bullet form. Use
   proper markdown formatting, like `this example from 0.15.0
   <https://github.com/spack/spack/commit/d4bf70d9882fcfe88507e9cb444331d7dd7ba71c>`_.

#. Push the release branch to GitHub.

#. Make sure CI passes on the release branch, including:

   * Regular unit tests
   * Build tests
   * The E4S pipeline at `gitlab.spack.io <https://gitlab.spack.io>`_

   If CI is not passing, submit pull requests to ``develop`` as normal
   and keep rebasing the release branch on ``develop`` until CI passes.

#. Follow the steps in :ref:`publishing-releases`.

#. Follow the steps in :ref:`merging-releases`.

#. Follow the steps in :ref:`announcing-releases`.

.. _point-releases:

^^^^^^^^^^^^^^^^^^^^^
Making Point Releases
^^^^^^^^^^^^^^^^^^^^^

This assumes you've already created a project board for a point release
and completed the work to be done for the release. To make a point
release:

#. Create one new project board for the next point release.

#. Move any cards that aren't done yet to the next project board.

#. Check out the release branch (it should already exist). For the
   ``X.Y.Z`` release, the release branch is called ``releases/vX.Y``. For
   ``v0.15.1``, you would check out ``releases/v0.15``:

   .. code-block:: console

      $ git checkout releases/v0.15

#. Cherry-pick each pull request in the ``Done`` column of the release
   project onto the release branch.

   This is **usually** fairly simple since we squash the commits from the
   vast majority of pull requests, which means there is only one commit
   per pull request to cherry-pick. For example, `this pull request
   <https://github.com/spack/spack/pull/15777>`_ has three commits, but
   they were squashed into a single commit on merge. You can see the
   commit that was created here:

   .. image:: images/pr-commit.png

   You can easily cherry-pick it like this (assuming you already have the
   release branch checked out):

   .. code-block:: console

      $ git cherry-pick 7e46da7

   For pull requests that were rebased, you'll need to cherry-pick each
   rebased commit individually. There have not been any rebased PRs like
   this in recent point releases.

   .. warning::

      It is important to cherry-pick commits in the order they happened,
      otherwise you can get conflicts while cherry-picking. When
      cherry-picking onto a point release, look at the merge date,
      **not** the number of the pull request or the date it was opened.

      Sometimes you may **still** get merge conflicts even if you have
      cherry-picked all the commits in order. This generally means there
      is some other intervening pull request that the one you're trying
      to pick depends on. In these cases, you'll need to make a judgment
      call:

      1. If the dependency is small, you might just cherry-pick it, too.
         If you do this, add it to the release board.

      2. If it is large, then you may decide that this fix is not worth
         including in a point release, in which case you should remove it
         from the release project.

      3. You can always decide to manually back-port the fix to the release
         branch if neither of the above options makes sense, but this can
         require a lot of work. It's seldom the right choice.

#. Bump the version in ``lib/spack/spack/__init__.py``. See `this example from 0.14.1
   <https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.

#. Update the release version lists in these files to include the new version:

   * ``lib/spack/spack/schema/container.py``
   * ``lib/spack/spack/container/images.json``

   **TODO**: We should get rid of this step in some future release.

#. Update ``CHANGELOG.md`` with a list of bugfixes. This is typically just a
   summary of the commits you cherry-picked onto the release branch. See
   `the changelog from 0.14.1
   <https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.

#. Push the release branch to GitHub.

#. Make sure CI passes on the release branch, including:

   * Regular unit tests
   * Build tests
   * The E4S pipeline at `gitlab.spack.io <https://gitlab.spack.io>`_

   If CI does not pass, you'll need to figure out why, and make changes
   to the release branch until it does. You can make more commits, modify
   or remove cherry-picked commits, or cherry-pick **more** from
   ``develop`` to make this happen.

#. Follow the steps in :ref:`publishing-releases`.

#. Follow the steps in :ref:`merging-releases`.

#. Follow the steps in :ref:`announcing-releases`.

.. _publishing-releases:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Publishing a release on GitHub
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

#. Go to `github.com/spack/spack/releases
   <https://github.com/spack/spack/releases>`_ and click ``Draft a new
   release``. Set the following:

   * ``Tag version`` should start with ``v`` and contain *all three*
     parts of the version, e.g. ``v0.15.1``. This is the name of the tag
     that will be created.

   * ``Target`` should be the ``releases/vX.Y`` branch (e.g., ``releases/v0.15``).

   * ``Release title`` should be ``vX.Y.Z`` (to match the tag, e.g., ``v0.15.1``).

   * For the text, paste the latest release markdown from your ``CHANGELOG.md``.

   You can save the draft and keep coming back to this as you prepare the release.

#. When you are done, click ``Publish release``.

#. Immediately after publishing, go back to
   `github.com/spack/spack/releases
   <https://github.com/spack/spack/releases>`_ and download the
   auto-generated ``.tar.gz`` file for the release. It's the ``Source
   code (tar.gz)`` link.

#. Click ``Edit`` on the release you just did and attach the downloaded
   release tarball as a binary. This does two things:

   #. Makes sure that the hash of our releases doesn't change over time.
      GitHub sometimes changes the way it generates tarballs, and then
      hashes can change if you rely on the auto-generated tarball links.

   #. Gets us download counts on releases visible through the GitHub
      API. GitHub tracks downloads of artifacts, but *not* the source
      links. See the `releases
      page <https://api.github.com/repos/spack/spack/releases>`_ and search
      for ``download_count`` to see this.

#. Go to `readthedocs.org <https://readthedocs.org/projects/spack>`_ and activate
   the release tag. This builds the documentation and makes the released version
   selectable in the versions menu.

.. _merging-releases:

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Updating `releases/latest` and `develop`
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

If the new release is the **highest** Spack release yet, you should
also tag it as ``releases/latest``. For example, suppose the highest
release is currently ``0.15.3``:

* If you are releasing ``0.15.4`` or ``0.16.0``, then you should tag
  it with ``releases/latest``, as these are higher than ``0.15.3``.

* If you are making a new release of an **older** major version of
  Spack, e.g. ``0.14.4``, then you should not tag it as
  ``releases/latest`` (as there are newer major versions).

To tag ``releases/latest``, do this:

.. code-block:: console

   $ git checkout releases/vX.Y   # vX.Y is the new release's branch
   $ git tag --force releases/latest
   $ git push --tags

The ``--force`` argument makes ``git`` overwrite the existing
``releases/latest`` tag with the new one.

We also merge each release that we tag as ``releases/latest`` into ``develop``.
Make sure to do this with a merge commit:

.. code-block:: console

   $ git checkout develop
   $ git merge --no-ff vX.Y.Z   # vX.Y.Z is the new release's tag
   $ git push

We merge back to ``develop`` because it:

* updates the version and ``CHANGELOG.md`` on ``develop``.
* ensures that your release tag is reachable from the head of
  ``develop``.

We *must* use a real merge commit (via the ``--no-ff`` option) because it
ensures that the release tag is reachable from the tip of ``develop``.
This is necessary for ``spack -V`` to work properly -- it uses ``git
describe --tags`` to find the last reachable tag in the repository and
reports how far we are from it. For example:

.. code-block:: console

   $ spack -V
   0.14.2-1486-b80d5e74e5

This says that we are at commit ``b80d5e74e5``, which is 1,486 commits
ahead of the ``0.14.2`` release.

We put this step last in the process because it's best to do it only once
the release is complete and tagged. If you do it before you've tagged the
release and later decide you want to tag some later commit, you'll need
to merge again.

.. _announcing-releases:

^^^^^^^^^^^^^^^^^^^^
Announcing a release
^^^^^^^^^^^^^^^^^^^^

We announce releases in all of the major Spack communication channels.
Publishing the release takes care of GitHub. The remaining channels are
Twitter, Slack, and the mailing list. Here are the steps:

#. Make a tweet to announce the release. It should link to the release's
   page on GitHub. You can base it on `this example tweet
   <https://twitter.com/spackpm/status/1231761858182307840>`_.

#. Ping ``@channel`` in ``#general`` on Slack (`spackpm.slack.com
   <https://spackpm.slack.com>`_) with a link to the tweet. The tweet
   will be shown inline so that you do not have to retype your release
   announcement.

#. Email the Spack mailing list to let them know about the release. As
   with the tweet, you likely want to link to the release's page on
   GitHub. It's also helpful to include some information directly in the
   email. You can base yours on this `example email
   <https://groups.google.com/forum/#!topic/spack/WT4CT9i_X4s>`_.

Once you've announced the release, congratulations, you're done! You've
finished making the release!

@@ -87,11 +87,12 @@ will be available from the command line:

      --implicit        select specs that are not installed or were installed implicitly
      --output OUTPUT   where to dump the result

The corresponding unit tests can be run giving the appropriate options
to ``spack unit-test``:

.. code-block:: console

   $ spack unit-test --extension=scripting

   ============================================================== test session starts ===============================================================
   platform linux2 -- Python 2.7.15rc1, pytest-3.2.5, py-1.4.34, pluggy-0.4.0

@@ -48,8 +48,8 @@ platform, all on the command line.

   # Add compiler flags using the conventional names
   $ spack install mpileaks@1.1.2 %gcc@4.7.3 cppflags="-O3 -floop-block"

   # Cross-compile for a different micro-architecture with target=
   $ spack install mpileaks@1.1.2 target=icelake

Users can specify as many or few options as they care about. Spack
will fill in the unspecified values with sensible defaults. The two listed

@@ -19,6 +19,9 @@ before Spack is run:

#. Python 2 (2.6 or 2.7) or 3 (3.5 - 3.8) to run Spack
#. A C/C++ compiler for building
#. The ``make`` executable for building
#. The ``tar``, ``gzip``, ``bzip2``, ``xz`` and optionally ``zstd``
   executables for extracting source code
#. The ``patch`` command to apply patches
#. The ``git`` and ``curl`` commands for fetching
#. If using the ``gpg`` subcommand, ``gnupg2`` is required

@@ -50,22 +53,37 @@ in the ``SPACK_ROOT`` environment variable. Add ``$SPACK_ROOT/bin``
to your path and you're ready to go:

.. code-block:: console

   # For bash/zsh users
   $ export SPACK_ROOT=/path/to/spack
   $ export PATH=$SPACK_ROOT/bin:$PATH

   # For tcsh/csh users
   $ setenv SPACK_ROOT /path/to/spack
   $ setenv PATH $SPACK_ROOT/bin:$PATH

   # For fish users
   $ set -x SPACK_ROOT /path/to/spack
   $ set -U fish_user_paths /path/to/spack $fish_user_paths

.. code-block:: console

   $ spack install libelf

For a richer experience, use Spack's shell support:

.. code-block:: console

   # Note you must set SPACK_ROOT

   # For bash/zsh users
   $ export SPACK_ROOT=/path/to/spack
   $ . $SPACK_ROOT/share/spack/setup-env.sh

   # For tcsh/csh users
   $ setenv SPACK_ROOT /path/to/spack
   $ source $SPACK_ROOT/share/spack/setup-env.csh

   # For fish users
   $ source $SPACK_ROOT/share/spack/setup-env.fish

This automatically adds Spack to your ``PATH`` and allows the ``spack``
command to be used to execute spack :ref:`commands <shell-support>` and

@@ -712,8 +730,9 @@ an OpenMPI installed in /opt/local, one would use:

   packages:
     openmpi:
       externals:
       - spec: openmpi@1.10.1
         prefix: /opt/local
       buildable: False

In general, Spack is easier to use and more reliable if it builds all of

@@ -775,8 +794,9 @@ Then add the following to ``~/.spack/packages.yaml``:

   packages:
     openssl:
       externals:
       - spec: openssl@1.0.2g
         prefix: /usr
       buildable: False

@@ -791,8 +811,9 @@ to add the following to ``packages.yaml``:

   packages:
     netlib-lapack:
       externals:
       - spec: netlib-lapack@3.6.1
         prefix: /usr
       buildable: False
     all:
       providers:

@@ -818,7 +839,7 @@ Git

Some Spack packages use ``git`` to download, which might not work on
some computers. For example, the following error was
encountered on a Macintosh during ``spack install julia@master``:

.. code-block:: console

@@ -1181,9 +1202,13 @@ Here's an example of an external configuration for cray modules:

   packages:
     mpich:
       externals:
       - spec: "mpich@7.3.1%gcc@5.2.0 arch=cray_xc-haswell-CNL10"
         modules:
         - cray-mpich
       - spec: "mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-haswell-CNL10"
         modules:
         - cray-mpich
     all:
       providers:
         mpi: [mpich]

@@ -1195,7 +1220,7 @@ via module load.

.. note::

   For Cray-provided packages, it is best to use ``modules:`` instead of ``prefix:``
   in ``packages.yaml``, because the Cray Programming Environment heavily relies on
   modules (e.g., loading the ``cray-mpich`` module adds MPI libraries to the
   compiler wrapper link line).

@@ -1211,19 +1236,31 @@ Here is an example of a full packages.yaml used at NERSC

   packages:
     mpich:
       externals:
       - spec: "mpich@7.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-mpich
       - spec: "mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-SuSE11-ivybridge"
         modules:
         - cray-mpich
       buildable: False
     netcdf:
       externals:
       - spec: "netcdf@4.3.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-netcdf
       - spec: "netcdf@4.3.3.1%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-netcdf
       buildable: False
     hdf5:
       externals:
       - spec: "hdf5@1.8.14%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-hdf5
       - spec: "hdf5@1.8.14%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-hdf5
       buildable: False
     all:
       compiler: [gcc@5.2.0, intel@16.0.0.109]

@@ -1247,6 +1284,6 @@ environment variables may be propagated into containers that are not
using the Cray programming environment.

To ensure that Spack does not autodetect the Cray programming
environment, unset the environment variable ``MODULEPATH``. This
will cause Spack to treat a linux container on a Cray system as a base
linux distro.

BIN lib/spack/docs/images/pr-commit.png (new binary file, 44 KiB; not shown)
BIN lib/spack/docs/images/projects.png (new binary file, 68 KiB; not shown)

@@ -14,7 +14,7 @@ problems if you encounter them.

Variants are not properly forwarded to dependencies
---------------------------------------------------

**Status:** Expected to be fixed by Spack's new concretizer

Sometimes, a variant of a package can also affect how its dependencies are
built. For example, in order to build MPI support for a package, it may

@@ -49,15 +49,29 @@ A workaround is to explicitly activate the variants of dependencies as well:

See https://github.com/spack/spack/issues/267 and
https://github.com/spack/spack/issues/2546 for further details.

-----------------------------------------------
depends_on cannot handle recursive dependencies
-----------------------------------------------

**Status:** Not yet a work in progress

Although ``depends_on`` can handle any aspect of Spack's spec syntax,
it currently cannot handle recursive dependencies. If the ``^`` sigil
appears in a ``depends_on`` statement, the concretizer will hang.
For example, something like:

.. code-block:: python

   depends_on('mfem+cuda ^hypre+cuda', when='+cuda')

should be rewritten as:

.. code-block:: python

   depends_on('mfem+cuda', when='+cuda')
   depends_on('hypre+cuda', when='+cuda')

See https://github.com/spack/spack/issues/17660 and
https://github.com/spack/spack/issues/11160 for more details.

----------------------------
``spack setup`` doesn't work
----------------------------

**Status:** Work in progress

Spack provides a ``setup`` command that is useful for the development of
software outside of Spack. Unfortunately, this command no longer works.
See https://github.com/spack/spack/issues/2597 and
https://github.com/spack/spack/issues/2662 for details. This is expected
to be fixed by https://github.com/spack/spack/pull/2664.

@@ -645,7 +645,7 @@ multiple fields based on delimiters such as ``.``, ``-`` etc. Then
matching fields are compared using the rules below:

#. The following develop-like strings are greater (newer) than all
   numbers and are ordered as ``develop > main > master > head > trunk``.

#. Numbers are all less than the chosen develop-like strings above,
   and are sorted numerically.
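
As a quick check of these two rules, a small sketch using Spack's own
``Version`` class (assuming ``spack.version`` is importable, e.g. under
``spack python``):

.. code-block:: python

   from spack.version import Version

   # Develop-like strings compare newer than any numeric version,
   # and follow the ordering described above.
   assert Version('develop') > Version('main') > Version('master')
   assert Version('main') > Version('1.2.3')

   # Numeric components sort numerically, not lexicographically.
   assert Version('1.10') > Version('1.9')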

@@ -1967,22 +1967,29 @@ exactly what kind of a dependency you need. For example:

   depends_on('cmake', type='build')
   depends_on('py-numpy', type=('build', 'run'))
   depends_on('libelf', type=('build', 'link'))
   depends_on('py-pytest', type='test')

The following dependency types are available:

* **"build"**: the dependency will be added to the ``PATH`` and
  ``PYTHONPATH`` at build-time.
* **"link"**: the dependency will be added to Spack's compiler
  wrappers, automatically injecting the appropriate linker flags,
  including ``-I``, ``-L``, and RPATH/RUNPATH handling.
* **"run"**: the dependency will be added to the ``PATH`` and
  ``PYTHONPATH`` at run-time. This is true for both ``spack load``
  and the module files Spack writes.
* **"test"**: the dependency will be added to the ``PATH`` and
  ``PYTHONPATH`` at build-time. The only difference between
  "build" and "test" is that test dependencies are only built
  if the user requests unit tests with ``spack install --test``.

One of the advantages of the ``build`` dependency type is that although the
dependency needs to be installed in order for the package to be built, it
can be uninstalled without concern afterwards. ``link`` and ``run`` disallow
this because uninstalling the dependency would break the package. Another
consequence of this is that ``build``-only dependencies do not affect the
hash of the package. The same is true for ``test`` dependencies.

If the dependency type is not specified, Spack uses a default of
``('build', 'link')``. This is the common case for compiler languages.
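
For illustration, the default makes these two declarations equivalent (a
small sketch in the style of the examples above):

.. code-block:: python

   # Equivalent: omitting ``type`` uses the default ('build', 'link').
   depends_on('libelf')
   depends_on('libelf', type=('build', 'link'))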

@@ -2003,7 +2010,8 @@ package. In that case, you could say something like:

.. code-block:: python

   variant('mpi', default=False, description='Enable MPI support')

   depends_on('mpi', when='+mpi')
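
Beyond a single variant, ``when`` accepts richer constraints (described just
below); a hedged sketch, with hypothetical package names:

.. code-block:: python

   # Variant constraint: require hdf5 with MPI only when our mpi variant is on.
   depends_on('hdf5+mpi', when='+mpi')

   # Version constraint: versions 1.5 and later need a newer dependency.
   depends_on('libfoo@2.0:', when='@1.5:')

   # Compiler constraint: depend on openblas only for GCC builds.
   depends_on('openblas', when='%gcc')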

``when`` can include constraints on the variant, version, compiler, etc. and

@@ -4054,21 +4062,223 @@ File functions

Making a package discoverable with ``spack external find``
----------------------------------------------------------

The simplest way to make a package discoverable with
:ref:`spack external find <cmd-spack-external-find>` is to:

1. Define the executables associated with the package
2. Implement a method to determine the versions of these executables

^^^^^^^^^^^^^^^^^
Minimal detection
^^^^^^^^^^^^^^^^^

The first step is fairly simple, as it requires only to
specify a package level ``executables`` attribute:

.. code-block:: python

   class Foo(Package):
       # Each string provided here is treated as a regular expression, and
       # would match for example 'foo', 'foobar', and 'bazfoo'.
       executables = ['foo']

This attribute must be a list of strings. Each string is a regular
expression (e.g. 'gcc' would match 'gcc', 'gcc-8.3', 'my-weird-gcc', etc.) used to
determine a set of system executables that might be part of this package. Note
that to match only executables named 'gcc' the regular expression ``'^gcc$'``
must be used.

Finally, to determine the version of each executable, the ``determine_version``
method must be implemented:

.. code-block:: python

   @classmethod
   def determine_version(cls, exe):
       """Return either the version of the executable passed as argument
       or ``None`` if the version cannot be determined.

       Args:
           exe (str): absolute path to the executable being examined
       """

This method receives as input the path to a single executable and must return
its version as a string; if the user cannot determine the version
or determines that the executable is not an instance of the package, they can
return ``None`` and the executable will be discarded as a candidate.
Implementing the two steps above is mandatory, and gives the package the
basic ability to detect if a spec is present on the system at a given version.
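
As a concrete illustration, a hedged sketch for the hypothetical ``foo`` tool
above; the ``--version`` flag and its output format are assumptions made for
the example, not part of any real package:

.. code-block:: python

   import re

   from spack import *  # provides Package, as in any package recipe
   from spack.util.executable import Executable


   class Foo(Package):
       executables = ['foo']

       @classmethod
       def determine_version(cls, exe):
           # Run the candidate and parse e.g. "foo version 1.2.3" from
           # its output; returning None discards this candidate.
           output = Executable(exe)('--version', output=str, error=str)
           match = re.search(r'foo version (\S+)', output)
           return match.group(1) if match else None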

.. note::
   Any executable for which the ``determine_version`` method returns ``None``
   will be discarded and won't appear in later stages of the workflow described below.

^^^^^^^^^^^^^^^^^^^^^^^^
Additional functionality
^^^^^^^^^^^^^^^^^^^^^^^^

Besides the two mandatory steps described above, there are also optional
methods that can be implemented to either increase the amount of details
being detected or improve the robustness of the detection logic in a package.

""""""""""""""""""""""""""""""
Variants and custom attributes
""""""""""""""""""""""""""""""

The ``determine_variants`` method can be optionally implemented in a package
to detect additional details of the spec:

.. code-block:: python

   @classmethod
   def determine_variants(cls, exes, version_str):
       """Return either a variant string, a tuple of a variant string
       and a dictionary of extra attributes that will be recorded in
       packages.yaml or a list of those items.

       Args:
           exes (list of str): list of executables (absolute paths) that
               live in the same prefix and share the same version
           version_str (str): version associated with the list of
               executables, as detected by ``determine_version``
       """

This method takes as input a list of executables that live in the same prefix and
share the same version string, and returns either:

1. A variant string
2. A tuple of a variant string and a dictionary of extra attributes
3. A list of items matching either 1 or 2 (if multiple specs are detected
   from the set of executables)
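
A minimal sketch of such a method, assuming a hypothetical tool whose
``--help`` output reveals whether it was built with MPI support:

.. code-block:: python

   from spack import *  # provides Package, as in any package recipe
   from spack.util.executable import Executable


   class Foo(Package):
       executables = ['foo']

       @classmethod
       def determine_variants(cls, exes, version_str):
           # Probe one executable; all of them share prefix and version.
           output = Executable(exes[0])('--help', output=str, error=str)
           variants = '+mpi' if 'mpi' in output else '~mpi'
           # Extra attributes are recorded in packages.yaml for later reuse.
           extra_attributes = {'executables': exes}
           return variants, extra_attributes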

If extra attributes are returned, they will be recorded in ``packages.yaml``
and be available for later reuse. As an example, the ``gcc`` package will record
by default the different compilers found and an entry in ``packages.yaml``
would look like:

.. code-block:: yaml

   packages:
     gcc:
       externals:
       - spec: 'gcc@9.0.1 languages=c,c++,fortran'
         prefix: /usr
         extra_attributes:
           compilers:
             c: /usr/bin/x86_64-linux-gnu-gcc-9
             c++: /usr/bin/x86_64-linux-gnu-g++-9
             fortran: /usr/bin/x86_64-linux-gnu-gfortran-9

This allows us, for instance, to keep track of executables that would be named
differently if built by Spack (e.g. ``x86_64-linux-gnu-gcc-9``
instead of just ``gcc``).

.. TODO: we need to gather some more experience on overriding 'prefix'
   and other special keywords in extra attributes, but as soon as we are
   confident that this is the way to go we should document the process.
   See https://github.com/spack/spack/pull/16526#issuecomment-653783204

"""""""""""""""""""""""""""
Filter matching executables
"""""""""""""""""""""""""""

Sometimes defining the appropriate regex for the ``executables``
attribute might prove to be difficult, especially if one has to
deal with corner cases or exclude "red herrings". To help keep
the regular expressions as simple as possible, each package can
optionally implement a ``filter_detected_exes`` method:

.. code-block:: python

   @classmethod
   def filter_detected_exes(cls, prefix, exes_in_prefix):
       """Return a filtered list of the executables in prefix"""

which takes as input a prefix and a list of matching executables and
returns a filtered list of said executables.

Using this method has the advantage of allowing custom logic for
filtering, and does not restrict the user to regular expressions
only. Consider the case of detecting the GNU C++ compiler. If we
try to search for executables that match ``g++``, that would have
the unwanted side effect of also selecting ``clang++`` - which is
a C++ compiler provided by another package - if present on the system.
Trying to select executables that contain ``g++`` but not ``clang``
would be quite complicated to do using regex only. Employing the
``filter_detected_exes`` method it becomes:

.. code-block:: python

   class Gcc(Package):
       executables = ['g++']

       @classmethod
       def filter_detected_exes(cls, prefix, exes_in_prefix):
           return [x for x in exes_in_prefix if 'clang' not in x]

Another possibility that this method opens is to apply certain
filtering logic when specific conditions are met (e.g. take some
decisions on an OS and not on another).
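
For instance, a hedged sketch of OS-conditional filtering; the macOS-specific
rule below is invented purely for illustration:

.. code-block:: python

   import sys

   from spack import *  # provides Package, as in any package recipe


   class Gcc(Package):
       executables = ['g++']

       @classmethod
       def filter_detected_exes(cls, prefix, exes_in_prefix):
           # Illustrative rule: on macOS, where g++ is often a shim for
           # clang++, discard anything clang-like; elsewhere keep all.
           if sys.platform == 'darwin':
               return [x for x in exes_in_prefix if 'clang' not in x]
           return exes_in_prefix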

^^^^^^^^^^^^^^^^^^
Validate detection
^^^^^^^^^^^^^^^^^^

To increase detection robustness, packagers may also implement a method
to validate the detected Spec objects:

.. code-block:: python

   @classmethod
   def validate_detected_spec(cls, spec, extra_attributes):
       """Validate a detected spec. Raise an exception if validation fails."""

This method receives a detected spec along with its extra attributes and can be
used to check that certain conditions are met by the spec. Packagers can either
use assertions or raise an ``InvalidSpecDetected`` exception when the check fails.
In case the conditions are not honored the spec will be discarded and any message
associated with the assertion or the exception will be logged as the reason for
discarding it.

As an example, a package that wants to check that the ``compilers`` attribute is
in the extra attributes can implement this method like this:

.. code-block:: python

   @classmethod
   def validate_detected_spec(cls, spec, extra_attributes):
       """Check that 'compilers' is in the extra attributes."""
       msg = ('the extra attribute "compilers" must be set for '
              'the detected spec "{0}"'.format(spec))
       assert 'compilers' in extra_attributes, msg

or like this:

.. code-block:: python

   @classmethod
   def validate_detected_spec(cls, spec, extra_attributes):
       """Check that 'compilers' is in the extra attributes."""
       if 'compilers' not in extra_attributes:
           msg = ('the extra attribute "compilers" must be set for '
                  'the detected spec "{0}"'.format(spec))
           raise InvalidSpecDetected(msg)

.. _determine_spec_details:

^^^^^^^^^^^^^^^^^^^^^^^^^
Custom detection workflow
^^^^^^^^^^^^^^^^^^^^^^^^^

In the rare case when the mechanisms described so far don't fit the
detection of a package, the implementation of all the methods above
can be disregarded and instead a custom ``determine_spec_details``
method can be implemented directly in the package class (note that
the definition of the ``executables`` attribute is still required):

.. code-block:: python

   @classmethod
   def determine_spec_details(cls, prefix, exes_in_prefix):
       # exes_in_prefix = a set of paths, each path is an executable
       # prefix = a prefix that is common to each path in exes_in_prefix

@@ -4076,14 +4286,13 @@ The method ``determine_spec_details`` has the following signature:

       # the package. Return one or more Specs for each instance of the
       # package which is thought to be installed in the provided prefix

This method takes as input a set of discovered executables (which match
those specified by the user) as well as a common prefix shared by all
of those executables. The function must return one or more :py:class:`spack.spec.Spec` associated
with the executables (it can also return ``None`` to indicate that no
provided executables are associated with the package).

As an example, consider a made-up package called ``foo-package`` which
builds an executable called ``foo``. ``FooPackage`` would appear as
follows:

@@ -4107,10 +4316,12 @@ follows:

           return
       # This implementation is lazy and only checks the first candidate
       exe_path = candidates[0]
       exe = Executable(exe_path)
       output = exe('--version', output=str, error=str)
       version_str = ...  # parse output for version string
       return Spec.from_detection(
           'foo-package@{0}'.format(version_str)
       )

.. _package-lifecycle:

@@ -4474,119 +4685,3 @@ might write:

   DWARF_PREFIX = $(spack location --install-dir libdwarf)
   CXXFLAGS += -I$DWARF_PREFIX/include
   CXXFLAGS += -L$DWARF_PREFIX/lib

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Build System Configuration Support
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Imagine a developer creating a CMake or Autotools-based project in a
local directory, which depends on libraries A-Z. Once Spack has
installed those dependencies, one would like to run ``cmake`` with
appropriate command line and environment so CMake can find them. The
``spack setup`` command does this conveniently, producing a CMake
configuration that is essentially the same as how Spack *would have*
configured the project. This can be demonstrated with a usage
example:

.. code-block:: console

   $ cd myproject
   $ spack setup myproject@local
   $ mkdir build; cd build
   $ ../spconfig.py ..
   $ make
   $ make install

Notes:

* Spack must have ``myproject/package.py`` in its repository for
  this to work.
* ``spack setup`` produces the executable script ``spconfig.py`` in
  the local directory, and also creates the module file for the
  package. ``spconfig.py`` is normally run from the user's
  out-of-source build directory.
* The version number given to ``spack setup`` is arbitrary, just
  like ``spack diy``. ``myproject/package.py`` does not need to
  have any valid downloadable versions listed (typical when a
  project is new).
* ``spconfig.py`` produces a CMake configuration that *does not* use the
  Spack wrappers. Any resulting binaries *will not* use RPATH,
  unless the user has enabled it. This is recommended for
  development purposes, not production.
* ``spconfig.py`` is human readable, and can serve as a developer
  reference of what dependencies are being used.
* ``make install`` installs the package into the Spack repository,
  where it may be used by other Spack packages.
* CMake-generated makefiles re-run CMake in some circumstances. Use
  of ``spconfig.py`` breaks this behavior, requiring the developer
  to manually re-run ``spconfig.py`` when a ``CMakeLists.txt`` file
  has changed.

^^^^^^^^^^^^
CMakePackage
^^^^^^^^^^^^

In order to enable ``spack setup`` functionality, the author of
``myproject/package.py`` must subclass from ``CMakePackage`` instead
of the standard ``Package`` superclass. Because CMake is
standardized, the packager does not need to tell Spack how to run
``cmake; make; make install``. Instead the packager only needs to
create (optional) methods ``configure_args()`` and ``configure_env()``, which
provide the arguments (as a list) and extra environment variables (as
a dict) to pass to the ``cmake`` command. Usually, these will
translate variant flags into CMake definitions. For example:

.. code-block:: python

   def cmake_args(self):
       spec = self.spec
       return [
           '-DUSE_EVERYTRACE=%s' % ('YES' if '+everytrace' in spec else 'NO'),
           '-DBUILD_PYTHON=%s' % ('YES' if '+python' in spec else 'NO'),
           '-DBUILD_GRIDGEN=%s' % ('YES' if '+gridgen' in spec else 'NO'),
           '-DBUILD_COUPLER=%s' % ('YES' if '+coupler' in spec else 'NO'),
           '-DUSE_PISM=%s' % ('YES' if '+pism' in spec else 'NO')
       ]

If needed, a packager may also override methods defined in
``StagedPackage`` (see below).

^^^^^^^^^^^^^
StagedPackage
^^^^^^^^^^^^^

``CMakePackage`` is implemented by subclassing the ``StagedPackage``
superclass, which breaks down the standard ``Package.install()``
method into several sub-stages: ``setup``, ``configure``, ``build``,
and ``install``. Details (a sketch follows the list):

* Instead of implementing the standard ``install()`` method, package
  authors implement the methods for the sub-stages
  ``install_setup()``, ``install_configure()``,
  ``install_build()``, and ``install_install()``.
* The ``spack install`` command runs the sub-stages ``configure``,
  ``build``, and ``install`` in order. (The ``setup`` stage is
  not run by default; see below).
* The ``spack setup`` command runs the sub-stages ``setup``
  and a dummy install (to create the module file).
* The sub-stage install methods take no arguments (other than
  ``self``). The arguments ``spec`` and ``prefix`` to the standard
  ``install()`` method may be accessed via ``self.spec`` and
  ``self.prefix``.
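
To make the split concrete, a hedged sketch of a package written against
this legacy interface; the method bodies are illustrative and assume the
usual helpers (``Executable``, ``make``) available in package modules:

.. code-block:: python

   from spack import *  # provides StagedPackage and build helpers


   class Myproject(StagedPackage):
       """Illustrative only; a real package would do more per stage."""

       def install_setup(self):
           # ``spack setup`` drives this stage to generate spconfig.py.
           pass

       def install_configure(self):
           configure = Executable('./configure')
           configure('--prefix={0}'.format(self.prefix))

       def install_build(self):
           make()

       def install_install(self):
           make('install')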

^^^^^^^^^^^^^
GNU Autotools
^^^^^^^^^^^^^

The ``setup`` functionality is currently only available for
CMake-based packages. Extending this functionality to GNU
Autotools-based packages would be easy (and should be done by a
developer who actively uses Autotools). Packages that use
non-standard build systems can gain ``setup`` functionality by
subclassing ``StagedPackage`` directly.

.. Emacs local variables
   Local Variables:
   fill-column: 79
   End:

@@ -45,7 +45,7 @@ for setting up a build pipeline are as follows:

      tags:
        - <custom-tag>
      script:
        - spack env activate --without-view .
        - spack ci generate
          --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/pipeline.yml"
      artifacts:

@@ -82,9 +82,9 @@ or Amazon Elastic Kubernetes Service (`EKS <https://aws.amazon.com/eks>`_), thou
topics are outside the scope of this document.

Spack's pipelines are now making use of the
`trigger <https://docs.gitlab.com/12.9/ee/ci/yaml/README.html#trigger>`_ syntax to run
dynamically generated
`child pipelines <https://docs.gitlab.com/12.9/ee/ci/parent_child_pipelines.html>`_.
Note that the use of dynamic child pipelines requires running Gitlab version
``>= 12.9``.

@@ -122,6 +122,10 @@ pipeline jobs.

Concretizes the specs in the active environment, stages them (as described in
:ref:`staging_algorithm`), and writes the resulting ``.gitlab-ci.yml`` to disk.

This sub-command takes two arguments, but the most useful is ``--output-file``,
which should be an absolute path (including file name) to the generated
pipeline, if the default (``./.gitlab-ci.yml``) is not desired.

.. _cmd-spack-ci-rebuild:

^^^^^^^^^^^^^^^^^^^^

@@ -132,6 +136,10 @@ This sub-command is responsible for ensuring a single spec from the release
environment is up to date on the remote mirror configured in the environment,
and as such, corresponds to a single job in the ``.gitlab-ci.yml`` file.

Rather than taking command-line arguments, this sub-command expects information
to be communicated via environment variables, which will typically come via the
``.gitlab-ci.yml`` job as ``variables``.

------------------------------------
A pipeline-enabled spack environment
------------------------------------

@@ -189,15 +197,33 @@ corresponds to a known gitlab runner, where the ``match`` section is used
in assigning a release spec to one of the runners, and the ``runner-attributes``
section is used to configure the spec/job for that particular runner.

Both the top-level ``gitlab-ci`` section as well as each ``runner-attributes``
section can also contain the following keys: ``image``, ``tags``, ``variables``,
``before_script``, ``script``, and ``after_script``. If any of these keys are
provided at the ``gitlab-ci`` level, they will be used as the defaults for any
``runner-attributes``, unless they are overridden in those sections. Specifying
any of these keys at the ``runner-attributes`` level generally overrides the
keys specified at the higher level, with a couple of exceptions. Any ``variables``
specified at both levels result in those dictionaries getting merged in the
resulting generated job, and any duplicate variable names get assigned the value
provided in the specific ``runner-attributes``. If ``tags`` are specified both
at the ``gitlab-ci`` level as well as the ``runner-attributes`` level, then the
lists of tags are combined, and any duplicates are removed.
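
A hedged Python model of the merging rules just described; this is not
Spack's implementation, only the documented behavior restated in code:

.. code-block:: python

   def merge_runner_attributes(gitlab_ci_defaults, runner_attributes):
       """Sketch: runner-attributes override the gitlab-ci defaults,
       except 'variables' (dicts merged, runner values win on duplicates)
       and 'tags' (lists combined, duplicates removed)."""
       merged = dict(gitlab_ci_defaults)
       merged.update(runner_attributes)

       variables = dict(gitlab_ci_defaults.get('variables', {}))
       variables.update(runner_attributes.get('variables', {}))
       merged['variables'] = variables

       combined = (gitlab_ci_defaults.get('tags', []) +
                   runner_attributes.get('tags', []))
       merged['tags'] = list(dict.fromkeys(combined))
       return merged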

See the section below on using a custom spack for an example of how these keys
could be used.

There are other pipeline options you can configure within the ``gitlab-ci`` section
as well.

The ``bootstrap`` section allows you to specify lists of specs from
your ``definitions`` that should be staged ahead of the environment's ``specs`` (this
section is described in more detail below). The ``enable-artifacts-buildcache`` key
takes a boolean and determines whether the pipeline uses artifacts to store and
pass along the buildcaches from one stage to the next (the default if you don't
provide this option is ``False``).

The ``final-stage-rebuild-index`` section controls whether an extra job is added to the
end of your pipeline (in a stage by itself) which will regenerate the mirror's
buildcache index. Under normal operation, each pipeline job that rebuilds a package

@@ -220,6 +246,11 @@ progresses, this build group may have jobs added or removed. The url, project,
and site are used to specify the CDash instance to which build results should
be reported.

Take a look at the
`schema <https://github.com/spack/spack/blob/develop/lib/spack/spack/schema/gitlab_ci.py>`_
for the gitlab-ci section of the spack environment file, to see precisely what
syntax is allowed there.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Assignment of specs to runners
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

@@ -245,7 +276,18 @@ runners known to the gitlab instance. For Docker executor type runners, the
as well as an ``entrypoint`` to override whatever the default for that image is).
For other types of runners the ``variables`` key will be useful to pass any
information on to the runner that it needs to do its work (e.g. scheduler
parameters, etc.). Any ``variables`` provided here will be added, verbatim, to
each job.

The ``runner-attributes`` section also allows users to supply custom ``script``,
``before_script``, and ``after_script`` sections to be applied to every job
scheduled on that runner. This allows users to do any custom preparation or
cleanup tasks that fit their particular workflow, as well as completely
customize the rebuilding of a spec if they so choose. Spack will not generate
a ``before_script`` or ``after_script`` for jobs, but if you do not provide
a custom ``script``, spack will generate one for you that assumes your
``spack.yaml`` is at the root of the repository, activates that environment for
you, and invokes ``spack ci rebuild``.

.. _staging_algorithm:

@@ -256,8 +298,8 @@ Summary of ``.gitlab-ci.yml`` generation algorithm

All specs yielded by the matrix (or all the specs in the environment) have their
dependencies computed, and the entire resulting set of specs are staged together
before being run through the ``gitlab-ci/mappings`` entries, where each staged
spec is assigned a runner. "Staging" is the name given to the process of
figuring out in what order the specs should be built, taking into consideration
Gitlab CI rules about jobs/stages. In the staging process the goal is to maximize
the number of jobs in any stage of the pipeline, while ensuring that the jobs in
any stage only depend on jobs in previous stages (since those jobs are guaranteed

@@ -268,7 +310,7 @@ a runner, the ``.gitlab-ci.yml`` is written to disk.

The short example provided above would result in the ``readline``, ``ncurses``,
and ``pkgconf`` packages getting staged and built on the runner chosen by the
``spack-k8s`` tag. In this example, spack assumes the runner is a Docker executor
type runner, and thus certain jobs will be run in the ``centos7`` container,
and others in the ``ubuntu-18.04`` container. The resulting ``.gitlab-ci.yml``
will contain 6 jobs in three stages. Once the jobs have been generated, the

@@ -327,12 +369,12 @@ Here's an example of what bootstrapping some compilers might look like:

     # mappings similar to the example higher up in this description
     ...

The example above adds a list to the ``definitions`` called ``compiler-pkgs``
(you can add any number of these), which lists compiler packages that should
be staged ahead of the full matrix of release specs (in this example, only
readline). Then within the ``gitlab-ci`` section, note the addition of a
``bootstrap`` section, which can contain a list of items, each referring to
a list in the ``definitions`` section. These items can either
be a dictionary or a string. If you supply a dictionary, it must have a name
key whose value must match one of the lists in definitions and it can have a
``compiler-agnostic`` key whose value is a boolean. If you supply a string,

@@ -368,13 +410,15 @@ Using a custom spack in your pipeline

If your runners will not have a version of spack ready to invoke, or if for some
other reason you want to use a custom version of spack to run your pipelines,
this section provides an example of how you could take advantage of
user-provided pipeline scripts to accomplish this fairly simply. First, you
could use the GitLab user interface to create CI environment variables
containing the url and branch or tag you want to use (calling them, for
example, ``SPACK_REPO`` and ``SPACK_REF``), then refer to those in a custom shell
script invoked both from your pipeline generation job, as well as in your rebuild
jobs. Here's the ``generate-pipeline`` job from the top of this document,
updated to invoke a custom shell script that will clone and source a custom
spack:

.. code-block:: yaml

@@ -382,12 +426,10 @@ a custom spack and make sure the generated rebuild jobs will clone it too:

      tags:
        - <some-other-tag>
      before_script:
        - ./cloneSpack.sh
      script:
        - spack env activate --without-view .
        - spack ci generate
          --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/pipeline.yml"
      after_script:
        - rm -rf ./spack

@@ -395,13 +437,68 @@ a custom spack and make sure the generated rebuild jobs will clone it too:

        paths:
          - "${CI_PROJECT_DIR}/jobs_scratch_dir/pipeline.yml"

And the ``cloneSpack.sh`` script could contain:

.. code-block:: bash

   #!/bin/bash

   git clone ${SPACK_REPO}
   pushd ./spack
   git checkout ${SPACK_REF}
   popd

   . "./spack/share/spack/setup-env.sh"

   spack --version

Finally, you would also want your generated rebuild jobs to clone that version
of spack, so you would update your ``spack.yaml`` from above as follows:

.. code-block:: yaml

   spack:
     ...
     gitlab-ci:
       mappings:
         - match:
             - os=ubuntu18.04
           runner-attributes:
             tags:
               - spack-kube
             image: spack/ubuntu-bionic
             before_script:
               - ./cloneSpack.sh
             script:
               - spack env activate --without-view .
               - spack -d ci rebuild
             after_script:
               - rm -rf ./spack

Now all of the generated rebuild jobs will use the same shell script to clone
spack before running their actual workload. Note in the above example the
provision of a custom ``script`` section. The reason for this is to run
``spack ci rebuild`` in debug mode to get more information when builds fail.

Now imagine you have long pipelines with many specs to be built, and you
are pointing to a spack repository and branch that has a tendency to change
frequently, such as the main repo and its ``develop`` branch. If each child
job checks out the ``develop`` branch, that could result in some jobs running
with one SHA of spack, while later jobs run with another. To help avoid this
issue, the pipeline generation process saves global variables called
``SPACK_VERSION`` and ``SPACK_CHECKOUT_VERSION`` that capture the version
of spack used to generate the pipeline. While the ``SPACK_VERSION`` variable
simply contains the human-readable value produced by ``spack -V`` at pipeline
generation time, the ``SPACK_CHECKOUT_VERSION`` variable can be used in a
``git checkout`` command to make sure all child jobs check out the same version
of spack used to generate the pipeline. To take advantage of this, you could
simply replace ``git checkout ${SPACK_REF}`` in the example ``cloneSpack.sh``
script above with ``git checkout ${SPACK_CHECKOUT_VERSION}``.

On the other hand, if you're pointing to a spack repository and branch under your
control, there may be no benefit in using the captured ``SPACK_CHECKOUT_VERSION``,
and you can instead just clone using the project CI variables you set (in the
earlier example these were ``SPACK_REPO`` and ``SPACK_REF``).

.. _ci_environment_variables:

@@ -703,400 +703,6 @@ environments:

   Administrators might find things easier to maintain without the
   added "heavyweight" state of a view.

------------------------------
Developing Software with Spack
------------------------------

For any project, one needs to assemble an
environment of that application's dependencies. You might consider
loading a series of modules or creating a filesystem view. This
approach, while obvious, has some serious drawbacks:

1. There is no guarantee that an environment created this way will be
   consistent. Your application could end up with dependency A
   expecting one version of MPI, and dependency B expecting another.
   The linker will not be happy...

2. Suppose you need to debug a package deep within your software DAG.
   If you build that package with a manual environment, then it
   becomes difficult to have Spack auto-build things that depend on
   it. That could be a serious problem, depending on how deep the
   package in question is in your dependency DAG.

3. At its core, Spack is a sophisticated concretization algorithm that
   matches up packages with appropriate dependencies and creates a
   *consistent* environment for the package it's building. Writing a
   list of ``spack load`` commands for your dependencies is at least
   as hard as writing the same list of ``depends_on()`` declarations
   in a Spack package. But it makes no use of Spack concretization
   and is more error-prone.

4. Spack provides an automated, systematic way not just to find a
   package's dependencies --- but also to build other packages on
   top. Any Spack package can become a dependency for another Spack
   package, offering a powerful vision of software re-use. If you
   build your package A outside of Spack, then your ability to use it
   as a building block for other packages in an automated way is
   diminished: other packages depending on package A will not
   be able to use Spack to fulfill that dependency.

5. If you are reading this manual, you probably love Spack. You're
   probably going to write a Spack package for your software so
   prospective users can install it with the least amount of pain.
   Why should you go to additional work to find dependencies in your
   development environment? Shouldn't Spack be able to help you build
   your software based on the package you've already written?

In this section, we show how Spack can be used in the software
development process to greatest effect, and how development packages
can be seamlessly integrated into the Spack ecosystem. We will show
how this process works by example, assuming the software you are
creating is called ``mylib``.

^^^^^^^^^^^^^^^^^^^^^
Write the CMake Build
^^^^^^^^^^^^^^^^^^^^^

For now, the techniques in this section only work for CMake-based
projects, although they could be easily extended to other build
systems in the future. We will therefore assume you are using CMake
to build your project.

The ``CMakeLists.txt`` file should be written as normal. A few caveats:

1. Your project should produce binaries with RPATHs. This will ensure
   that they work the same whether built manually or automatically by
   Spack. For example:

   .. code-block:: cmake

      # enable @rpath in the install name for any shared library being built
      # note: it is planned that a future version of CMake will enable this by default
      set(CMAKE_MACOSX_RPATH 1)

      # Always use full RPATH
      # http://www.cmake.org/Wiki/CMake_RPATH_handling
      # http://www.kitware.com/blog/home/post/510

      # use, i.e. don't skip the full RPATH for the build tree
      SET(CMAKE_SKIP_BUILD_RPATH FALSE)

      # when building, don't use the install RPATH already
      # (but later on when installing)
      SET(CMAKE_BUILD_WITH_INSTALL_RPATH FALSE)

      # add the automatically determined parts of the RPATH
      # which point to directories outside the build tree to the install RPATH
      SET(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)

      # the RPATH to be used when installing, but only if it's not a system directory
      LIST(FIND CMAKE_PLATFORM_IMPLICIT_LINK_DIRECTORIES "${CMAKE_INSTALL_PREFIX}/lib" isSystemDir)
      IF("${isSystemDir}" STREQUAL "-1")
          SET(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/lib")
      ENDIF("${isSystemDir}" STREQUAL "-1")

2. Spack provides a CMake variable called
   ``SPACK_TRANSITIVE_INCLUDE_PATH``, which contains the ``include/``
   directory for all of your project's transitive dependencies. It
   can be useful if your project ``#include``s files from package B,
   which ``#include`` files from package C, but your project only
   lists project B as a dependency. This works in traditional
   single-tree build environments, in which B and C's include files
   live in the same place. In order to make it work with Spack as
   well, you must add the following to ``CMakeLists.txt``. It will
   have no effect when building without Spack:

   .. code-block:: cmake

      # Include all the transitive dependencies determined by Spack.
      # If we're not running with Spack, this does nothing...
      include_directories($ENV{SPACK_TRANSITIVE_INCLUDE_PATH})

   .. note::

      Note that this feature is controversial and could break with
      future versions of GNU ld. The best practice is to make sure
      anything you ``#include`` is listed as a dependency in your
      CMakeLists.txt (and Spack package).

.. _write-the-spack-package:

^^^^^^^^^^^^^^^^^^^^^^^
Write the Spack Package
^^^^^^^^^^^^^^^^^^^^^^^

The Spack package also needs to be written, in tandem with setting up
the build (for example, CMake). The most important part of this task
is declaring dependencies. Here is an example of the Spack package
for the ``mylib`` package (ellipses for brevity):

.. code-block:: python

   class Mylib(CMakePackage):
       """Misc. reusable utilities used by Myapp."""

       homepage = "https://github.com/citibeth/mylib"
       url = "https://github.com/citibeth/mylib/tarball/123"

       version('0.1.2', '3a6acd70085e25f81b63a7e96c504ef9')
       version('develop', git='https://github.com/citibeth/mylib.git',
               branch='develop')

       variant('everytrace', default=False,
               description='Report errors through Everytrace')
       ...

       extends('python')

       depends_on('eigen')
       depends_on('everytrace', when='+everytrace')
       depends_on('proj', when='+proj')
       ...
       depends_on('cmake', type='build')
       depends_on('doxygen', type='build')

       def cmake_args(self):
           spec = self.spec
           return [
               '-DUSE_EVERYTRACE=%s' % ('YES' if '+everytrace' in spec else 'NO'),
               '-DUSE_PROJ4=%s' % ('YES' if '+proj' in spec else 'NO'),
               ...
               '-DUSE_UDUNITS2=%s' % ('YES' if '+udunits2' in spec else 'NO'),
               '-DUSE_GTEST=%s' % ('YES' if '+googletest' in spec else 'NO')]

This is a standard Spack package that can be used to install
``mylib`` in a production environment. The list of dependencies in
the Spack package will generally be a repeat of the list of CMake
dependencies. This package also has some features that allow it to be
used for development:

1. It subclasses ``CMakePackage`` instead of ``Package``. This
   eliminates the need to write an ``install()`` method, which is
   defined in the superclass. Instead, one just needs to write the
   ``cmake_args()`` method. That method should return the
   arguments needed for the ``cmake`` command (beyond the standard
   CMake arguments, which Spack will include already). These
   arguments are typically used to turn features on/off in the build
   (a sketch of a small helper follows this list).

2. It specifies a non-checksummed version ``develop``. Running
   ``spack install mylib@develop`` will install the latest version
   off the ``develop`` branch. This method of download is useful for
   the developer of a project while it is in active development;
   however, it should only be used by developers who control and
   trust the repository in question!

3. The ``url``, ``url_for_version()`` and ``homepage`` attributes are
   not used in development. Don't worry if you don't have any, or if
   they are behind a firewall.
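
The boolean flags in ``cmake_args()`` above all follow the same
pattern, so a small helper can reduce the repetition. This is just a
sketch; ``onoff`` is a hypothetical name, not part of Spack's API:

.. code-block:: python

   def onoff(spec, variant):
       # Map a variant like '+proj' in the spec to the string CMake expects.
       return 'YES' if '+' + variant in spec else 'NO'

   # e.g. in cmake_args():  '-DUSE_PROJ4=%s' % onoff(spec, 'proj')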

^^^^^^^^^^^^^^^^
Build with Spack
^^^^^^^^^^^^^^^^

Now that you have a Spack package, you can use Spack to find its
dependencies automatically. For example:

.. code-block:: console

   $ cd mylib
   $ spack setup mylib@local

The result will be a file ``spconfig.py`` in the top-level
``mylib/`` directory. It is a short script that calls CMake with the
dependencies and options determined by Spack --- similar to what
happens in ``spack install``, but now written out in script form.
From a developer's point of view, you can think of ``spconfig.py`` as
a stand-in for the ``cmake`` command.

.. note::

   You can invent any "version" you like for the ``spack setup``
   command.

.. note::

   Although ``spack setup`` does not build your package, it does
   create and install a module file, and mark in the database that
   your package has been installed. This can lead to errors, of
   course, if you don't subsequently install your package.
   Also, you will need to ``spack uninstall`` before you run
   ``spack setup`` again.

You can now build your project as usual with CMake:

.. code-block:: console

   $ mkdir build; cd build
   $ ../spconfig.py ..   # Instead of cmake ..
   $ make
   $ make install

Once your ``make install`` command is complete, your package will be
installed, just as if you'd run ``spack install`` --- except you can
now edit, re-build, and re-install as often as needed, without
checking code into Git or downloading tarballs.

.. note::

   The build you get this way will be *almost* the same as the build
   from ``spack install``. The only difference is that you will not
   be using Spack's compiler wrappers. This difference has not caused
   problems in our experience, as long as your project sets
   RPATHs as shown above. You DO use RPATHs, right?

^^^^^^^^^^^^^^^^^^^^
Build Other Software
^^^^^^^^^^^^^^^^^^^^

Now that you've built ``mylib`` with Spack, you might want to build
another package that depends on it --- for example, ``myapp``. This
is accomplished easily enough:

.. code-block:: console

   $ spack install myapp ^mylib@local

Note that auto-built software has now been installed *on top of*
manually-built software, without breaking Spack's "web." This
property is useful if you need to debug a package deep in the
dependency hierarchy of your application. It is a *big* advantage of
using ``spack setup`` to build your package's environment.

If you feel your software is stable, you might wish to install it with
``spack install`` and skip the source directory. You can just use,
for example:

.. code-block:: console

   $ spack install mylib@develop

.. _release-your-software:

^^^^^^^^^^^^^^^^^^^^^
Release Your Software
^^^^^^^^^^^^^^^^^^^^^

You are now ready to release your software as a tarball with a
numbered version, and a Spack package that can build it. If you're
hosted on GitHub, this process will be a bit easier.

#. Put tags on the versions in your GitHub repo that you want to be
   release versions. For example, a tag ``v0.1.0`` for version 0.1.0.

#. Set the ``url`` in your ``package.py`` to download a tarball for
   the appropriate version. GitHub will give you a tarball for any
   commit in the repo, if you tickle it the right way. For example:

   .. code-block:: python

      url = 'https://github.com/citibeth/mylib/tarball/v0.1.2'

#. Use Spack to determine your version's hash, and cut'n'paste it into
   your ``package.py``:

   .. code-block:: console

      $ spack checksum mylib 0.1.2
      ==> Found 1 versions of mylib
        0.1.2      https://github.com/citibeth/mylib/tarball/v0.1.2

      How many would you like to checksum? (default is 5, q to abort)
      ==> Downloading...
      ==> Trying to fetch from https://github.com/citibeth/mylib/tarball/v0.1.2
      ######################################################################## 100.0%
      ==> Checksummed new versions of mylib:
          version('0.1.2', '3a6acd70085e25f81b63a7e96c504ef9')

#. You should now be able to install released version 0.1.2 of your package with:

   .. code-block:: console

      $ spack install mylib@0.1.2

#. There is no need to remove the ``develop`` version from your
   package. Spack concretization will always prefer a numbered
   version to a non-numbered version; users will only get the
   ``develop`` version if they ask for it.

^^^^^^^^^^^^^^^^^^^^^^^^
Distribute Your Software
^^^^^^^^^^^^^^^^^^^^^^^^

Once you've released your software, other people will want to build
it, and you will need to tell them how. In the past, that has meant a
few paragraphs of prose explaining which dependencies to install. Now
that you use Spack, those instructions are executable Python code in
your ``package.py``, and Spack itself is the best way to install your
software:

#. First, you will want to fork Spack's ``develop`` branch. Your aim
   is to provide a stable version of Spack that you KNOW will install
   your software. If you make changes to Spack in the process, you
   will want to submit pull requests to Spack core.

#. Add your software's ``package.py`` to that fork. You should submit
   a pull request for this as well, unless you don't want the public
   to know about your software.

#. Prepare instructions that read approximately as follows:

   #. Download Spack from your forked repo.

   #. Install Spack; see :ref:`getting_started`.

   #. Set up an appropriate ``packages.yaml`` file. You should tell
      your users to include in this file whatever versions/variants
      are needed to make your software work correctly (assuming those
      are not already in your ``packages.yaml``); a sketch follows
      this list.

   #. Run ``spack install mylib``.

   #. Run this script to generate the ``module load`` commands or
      filesystem view needed to use this software.

#. Be aware that your users might encounter unexpected bootstrapping
   issues on their machines, especially if they are running on older
   systems. The :ref:`getting_started` section should cover this, but
   there could always be issues.
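
For the ``packages.yaml`` step above, a minimal sketch might look like
the following. The compiler version and variant shown here are
placeholders, not requirements of any real package; substitute
whatever your software actually needs:

.. code-block:: yaml

   packages:
     all:
       compiler: [gcc@5.4.0]
     mylib:
       variants: +everytrace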

^^^^^^^^^^^^^^^^^^^
Other Build Systems
^^^^^^^^^^^^^^^^^^^

``spack setup`` currently only supports CMake-based builds, in
packages that subclass ``CMakePackage``. The intent is that this
mechanism should support a wider range of build systems; for example,
GNU Autotools. Someone well-versed in Autotools is needed to develop
this patch and test it out.

Python Distutils is another popular build system that should get
``spack setup`` support. For non-compiled languages like Python,
``spack diy`` may be used. Even better is to put the source directory
directly in the user's ``PYTHONPATH``. Then, edits to source files
are immediately available to run without any install process at all!
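
As a concrete sketch of that last suggestion (the path below is a
placeholder for wherever your source checkout actually lives):

.. code-block:: console

   $ export PYTHONPATH=$HOME/src/mylib:$PYTHONPATH
   $ python -c 'import mylib'   # runs straight from the source tree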

^^^^^^^^^^
Conclusion
^^^^^^^^^^

The ``spack setup`` development workflow provides better automation,
flexibility and safety than workflows relying on environment modules
or filesystem views. However, it has some drawbacks:

#. It currently works only with projects that use the CMake build
   system. Support for other build systems is not hard to add, but
   will require a small amount of effort for each build system to be
   supported. It might not work well with some IDEs.

#. It only works with packages that subclass ``StagedPackage``.
   Currently, most Spack packages do not. Converting them is not
   hard, but it must be done on a package-by-package basis.

#. It requires that users are comfortable with Spack, as they
   integrate Spack explicitly in their workflow. Not all users are
   willing to do this.

-------------------------------------
Using Spack to Replace Homebrew/Conda
-------------------------------------

@@ -1405,11 +1011,12 @@ The main points that are implemented below:

      - export CXXFLAGS="-std=c++11"

    install:
      - if ! which spack >/dev/null; then
      - |
        if ! which spack >/dev/null; then
          mkdir -p $SPACK_ROOT &&
          git clone --depth 50 https://github.com/spack/spack.git $SPACK_ROOT &&
          echo -e "config:""\n  build_jobs:"" 2" > $SPACK_ROOT/etc/spack/config.yaml &&
          echo -e "packages:""\n  all:""\n    target:"" ['x86_64']"
          printf "config:\n  build_jobs: 2\n" > $SPACK_ROOT/etc/spack/config.yaml &&
          printf "packages:\n  all:\n    target: ['x86_64']\n" \
            > $SPACK_ROOT/etc/spack/packages.yaml;
        fi
      - travis_wait spack install cmake@3.7.2~openssl~ncurses

@@ -1544,8 +1151,9 @@ Avoid double-installing CUDA by adding, e.g.

   packages:
     cuda:
       paths:
         cuda@9.0.176%gcc@5.4.0 arch=linux-ubuntu16-x86_64: /usr/local/cuda
       externals:
       - spec: "cuda@9.0.176%gcc@5.4.0 arch=linux-ubuntu16-x86_64"
         prefix: /usr/local/cuda
       buildable: False

to your ``packages.yaml``.

lib/spack/external/ctest_log_parser.py (vendored)

@@ -118,6 +118,7 @@ def match(self, text):

    "([^:]+): (Error:|error|undefined reference|multiply defined)",
    "([^ :]+) ?: (error|fatal error|catastrophic error)",
    "([^:]+)\\(([^\\)]+)\\) ?: (error|fatal error|catastrophic error)"),
    "^FAILED",
    "^[Bb]us [Ee]rror",
    "^[Ss]egmentation [Vv]iolation",
    "^[Ss]egmentation [Ff]ault",

@@ -652,15 +652,14 @@

      "avx512cd",
      "avx512vbmi",
      "avx512ifma",
      "sha",
      "sha_ni",
      "umip",
      "clwb",
      "rdpid",
      "gfni",
      "avx512vbmi2",
      "avx512vpopcntdq",
      "avx512bitalg",
      "avx512vnni",
      "avx512_vbmi2",
      "avx512_vpopcntdq",
      "avx512_bitalg",
      "avx512_vnni",
      "vpclmulqdq",
      "vaes"
    ],

@@ -2,7 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import collections
import errno
import hashlib

@@ -42,6 +41,8 @@
    'fix_darwin_install_name',
    'force_remove',
    'force_symlink',
    'chgrp',
    'chmod_x',
    'copy',
    'install',
    'copy_tree',

@@ -52,6 +53,7 @@
    'partition_path',
    'prefixes',
    'remove_dead_links',
    'remove_directory_contents',
    'remove_if_dead_link',
    'remove_linked_tree',
    'set_executable',

@@ -338,56 +340,78 @@ def unset_executable_mode(path):


def copy(src, dest, _permissions=False):
    """Copies the file *src* to the file or directory *dest*.
    """Copy the file(s) *src* to the file or directory *dest*.

    If *dest* specifies a directory, the file will be copied into *dest*
    using the base filename from *src*.

    *src* may contain glob characters.

    Parameters:
        src (str): the file to copy
        src (str): the file(s) to copy
        dest (str): the destination file or directory
        _permissions (bool): for internal use only

    Raises:
        IOError: if *src* does not match any files or directories
        ValueError: if *src* matches multiple files but *dest* is
            not a directory
    """
    if _permissions:
        tty.debug('Installing {0} to {1}'.format(src, dest))
    else:
        tty.debug('Copying {0} to {1}'.format(src, dest))

    # Expand dest to its eventual full path if it is a directory.
    if os.path.isdir(dest):
        dest = join_path(dest, os.path.basename(src))
    files = glob.glob(src)
    if not files:
        raise IOError("No such file or directory: '{0}'".format(src))
    if len(files) > 1 and not os.path.isdir(dest):
        raise ValueError(
            "'{0}' matches multiple files but '{1}' is not a directory".format(
                src, dest))

    shutil.copy(src, dest)
    for src in files:
        # Expand dest to its eventual full path if it is a directory.
        dst = dest
        if os.path.isdir(dest):
            dst = join_path(dest, os.path.basename(src))

    if _permissions:
        set_install_permissions(dest)
        copy_mode(src, dest)
        shutil.copy(src, dst)

        if _permissions:
            set_install_permissions(dst)
            copy_mode(src, dst)


def install(src, dest):
    """Installs the file *src* to the file or directory *dest*.
    """Install the file(s) *src* to the file or directory *dest*.

    Same as :py:func:`copy` with the addition of setting proper
    permissions on the installed file.

    Parameters:
        src (str): the file to install
        src (str): the file(s) to install
        dest (str): the destination file or directory

    Raises:
        IOError: if *src* does not match any files or directories
        ValueError: if *src* matches multiple files but *dest* is
            not a directory
    """
    copy(src, dest, _permissions=True)


def resolve_link_target_relative_to_the_link(l):
def resolve_link_target_relative_to_the_link(link):
    """
    os.path.isdir uses os.path.exists, which for links will check
    the existence of the link target. If the link target is relative to
    the link, we need to construct a pathname that is valid from
    our cwd (which may not be the same as the link's directory)
    """
    target = os.readlink(l)
    target = os.readlink(link)
    if os.path.isabs(target):
        return target
    link_dir = os.path.dirname(os.path.abspath(l))
    link_dir = os.path.dirname(os.path.abspath(link))
    return os.path.join(link_dir, target)

@@ -397,6 +421,8 @@ def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
    If the destination directory *dest* does not already exist, it will
    be created as well as missing parent directories.

    *src* may contain glob characters.

    If *symlinks* is true, symbolic links in the source tree are represented
    as symbolic links in the new tree and the metadata of the original links
    will be copied as far as the platform allows; if false, the contents and

@@ -411,56 +437,66 @@ def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
        symlinks (bool): whether or not to preserve symlinks
        ignore (function): function indicating which files to ignore
        _permissions (bool): for internal use only

    Raises:
        IOError: if *src* does not match any files or directories
        ValueError: if *src* is a parent directory of *dest*
    """
    if _permissions:
        tty.debug('Installing {0} to {1}'.format(src, dest))
    else:
        tty.debug('Copying {0} to {1}'.format(src, dest))

    abs_src = os.path.abspath(src)
    if not abs_src.endswith(os.path.sep):
        abs_src += os.path.sep
    abs_dest = os.path.abspath(dest)
    if not abs_dest.endswith(os.path.sep):
        abs_dest += os.path.sep

    # Stop early to avoid unnecessary recursion if being asked to copy from a
    # parent directory.
    if abs_dest.startswith(abs_src):
        raise ValueError('Cannot copy ancestor directory {0} into {1}'.
                         format(abs_src, abs_dest))
    files = glob.glob(src)
    if not files:
        raise IOError("No such file or directory: '{0}'".format(src))

    mkdirp(dest)
    for src in files:
        abs_src = os.path.abspath(src)
        if not abs_src.endswith(os.path.sep):
            abs_src += os.path.sep

    for s, d in traverse_tree(abs_src, abs_dest, order='pre',
                              follow_symlinks=not symlinks,
                              ignore=ignore,
                              follow_nonexisting=True):
        if os.path.islink(s):
            link_target = resolve_link_target_relative_to_the_link(s)
            if symlinks:
                target = os.readlink(s)
                if os.path.isabs(target):
                    new_target = re.sub(abs_src, abs_dest, target)
                    if new_target != target:
                        tty.debug("Redirecting link {0} to {1}"
                                  .format(target, new_target))
                        target = new_target
        # Stop early to avoid unnecessary recursion if being asked to copy
        # from a parent directory.
        if abs_dest.startswith(abs_src):
            raise ValueError('Cannot copy ancestor directory {0} into {1}'.
                             format(abs_src, abs_dest))

                os.symlink(target, d)
            elif os.path.isdir(link_target):
                mkdirp(d)
        mkdirp(abs_dest)

        for s, d in traverse_tree(abs_src, abs_dest, order='pre',
                                  follow_symlinks=not symlinks,
                                  ignore=ignore,
                                  follow_nonexisting=True):
            if os.path.islink(s):
                link_target = resolve_link_target_relative_to_the_link(s)
                if symlinks:
                    target = os.readlink(s)
                    if os.path.isabs(target):
                        new_target = re.sub(abs_src, abs_dest, target)
                        if new_target != target:
                            tty.debug("Redirecting link {0} to {1}"
                                      .format(target, new_target))
                            target = new_target

                    os.symlink(target, d)
                elif os.path.isdir(link_target):
                    mkdirp(d)
                else:
                    shutil.copyfile(s, d)
            else:
                shutil.copyfile(s, d)
        else:
            if os.path.isdir(s):
                mkdirp(d)
            else:
                shutil.copy2(s, d)
                if os.path.isdir(s):
                    mkdirp(d)
                else:
                    shutil.copy2(s, d)

        if _permissions:
            set_install_permissions(d)
            copy_mode(s, d)
            if _permissions:
                set_install_permissions(d)
                copy_mode(s, d)


def install_tree(src, dest, symlinks=True, ignore=None):

@@ -474,6 +510,10 @@ def install_tree(src, dest, symlinks=True, ignore=None):
        dest (str): the destination directory
        symlinks (bool): whether or not to preserve symlinks
        ignore (function): function indicating which files to ignore

    Raises:
        IOError: if *src* does not match any files or directories
        ValueError: if *src* is a parent directory of *dest*
    """
    copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)

@@ -643,7 +683,7 @@ def replace_directory_transaction(directory_name, tmp_root=None):

    try:
        yield tmp_dir
    except (Exception, KeyboardInterrupt, SystemExit):
    except (Exception, KeyboardInterrupt, SystemExit) as e:
        # Delete what was there, before copying back the original content
        if os.path.exists(directory_name):
            shutil.rmtree(directory_name)

@@ -654,6 +694,7 @@ def replace_directory_transaction(directory_name, tmp_root=None):
        tty.debug('DIRECTORY RECOVERED [{0}]'.format(directory_name))

        msg = 'the transactional move of "{0}" failed.'
        msg += '\n ' + str(e)
        raise RuntimeError(msg.format(directory_name))
    else:
        # Otherwise delete the temporary directory

@@ -937,6 +978,53 @@ def remove_linked_tree(path):
        shutil.rmtree(path, True)


@contextmanager
def safe_remove(*files_or_dirs):
    """Context manager to remove the files passed as input, but restore
    them in case any exception is raised in the context block.

    Args:
        *files_or_dirs: glob expressions for files or directories
            to be removed

    Returns:
        Dictionary that maps deleted files to their temporary copy
        within the context block.
    """
    # Find all the files or directories that match
    glob_matches = [glob.glob(x) for x in files_or_dirs]
    # Sort them so that shorter paths like "/foo/bar" come before
    # nested paths like "/foo/bar/baz.yaml". This simplifies the
    # handling of temporary copies below
    sorted_matches = sorted([
        os.path.abspath(x) for x in itertools.chain(*glob_matches)
    ], key=len)

    # Copy files and directories in a temporary location
    removed, dst_root = {}, tempfile.mkdtemp()
    try:
        for id, file_or_dir in enumerate(sorted_matches):
            # The glob expression at the top ensures that the file/dir exists
            # at the time we enter the loop. Double check here since it might
            # happen that a previous iteration of the loop already removed it.
            # This is the case, for instance, if we remove the directory
            # "/foo/bar" before the file "/foo/bar/baz.yaml".
            if not os.path.exists(file_or_dir):
                continue
            # The monotonic ID is a simple way to make the filename
            # or directory name unique in the temporary folder
            basename = os.path.basename(file_or_dir) + '-{0}'.format(id)
            temporary_path = os.path.join(dst_root, basename)
            shutil.move(file_or_dir, temporary_path)
            removed[file_or_dir] = temporary_path
        yield removed
    except BaseException:
        # Restore the files that were removed
        for original_path, temporary_path in removed.items():
            shutil.move(temporary_path, original_path)
        raise


def fix_darwin_install_name(path):
    """Fix install name of dynamic libraries on Darwin to have full path.

@@ -1570,6 +1658,19 @@ def can_access_dir(path):
    return os.path.isdir(path) and os.access(path, os.R_OK | os.X_OK)


@memoized
def can_write_to_dir(path):
    """Return True if the argument is a directory in which we can write.

    Args:
        path: path to be tested

    Returns:
        True if ``path`` is an writeable directory, else False
    """
    return os.path.isdir(path) and os.access(path, os.R_OK | os.X_OK | os.W_OK)


@memoized
def files_in(*search_paths):
    """Returns all the files in paths passed as arguments.

@@ -1683,3 +1784,28 @@ def prefixes(path):
        pass

    return paths


def md5sum(file):
    """Compute the MD5 sum of a file.

    Args:
        file (str): file to be checksummed

    Returns:
        MD5 sum of the file's content
    """
    md5 = hashlib.md5()
    with open(file, "rb") as f:
        md5.update(f.read())
    return md5.digest()


def remove_directory_contents(dir):
    """Remove all contents of a directory."""
    if os.path.exists(dir):
        for entry in [os.path.join(dir, entry) for entry in os.listdir(dir)]:
            if os.path.isfile(entry) or os.path.islink(entry):
                os.unlink(entry)
            else:
                shutil.rmtree(entry)

@@ -5,6 +5,7 @@

from __future__ import division

import multiprocessing
import os
import re
import functools

@@ -19,49 +20,67 @@
ignore_modules = [r'^\.#', '~$']


# On macOS, Python 3.8 multiprocessing now defaults to the 'spawn' start
# method. Spack cannot currently handle this, so force the process to start
# using the 'fork' start method.
#
# TODO: This solution is not ideal, as the 'fork' start method can lead to
# crashes of the subprocess. Figure out how to make 'spawn' work.
#
# See:
# * https://github.com/spack/spack/pull/18124
# * https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods  # noqa: E501
# * https://bugs.python.org/issue33725
if sys.version_info >= (3,):  # novm
    fork_context = multiprocessing.get_context('fork')
else:
    fork_context = multiprocessing


def index_by(objects, *funcs):
    """Create a hierarchy of dictionaries by splitting the supplied
    set of objects on unique values of the supplied functions.
    Values are used as keys. For example, suppose you have four
    objects with attributes that look like this::
    set of objects on unique values of the supplied functions.

        a = Spec(name="boost", compiler="gcc", arch="bgqos_0")
        b = Spec(name="mrnet", compiler="intel", arch="chaos_5_x86_64_ib")
        c = Spec(name="libelf", compiler="xlc", arch="bgqos_0")
        d = Spec(name="libdwarf", compiler="intel", arch="chaos_5_x86_64_ib")
    Values are used as keys. For example, suppose you have four
    objects with attributes that look like this::

        list_of_specs = [a,b,c,d]
        index1 = index_by(list_of_specs, lambda s: s.arch,
                          lambda s: s.compiler)
        index2 = index_by(list_of_specs, lambda s: s.compiler)
        a = Spec("boost %gcc target=skylake")
        b = Spec("mrnet %intel target=zen2")
        c = Spec("libelf %xlc target=skylake")
        d = Spec("libdwarf %intel target=zen2")

    ``index1`` now has two levels of dicts, with lists at the
    leaves, like this::
        list_of_specs = [a,b,c,d]
        index1 = index_by(list_of_specs, lambda s: str(s.target),
                          lambda s: s.compiler)
        index2 = index_by(list_of_specs, lambda s: s.compiler)

        { 'bgqos_0' : { 'gcc' : [a], 'xlc' : [c] },
          'chaos_5_x86_64_ib' : { 'intel' : [b, d] }
        }
    ``index1`` now has two levels of dicts, with lists at the
    leaves, like this::

    And ``index2`` is a single level dictionary of lists that looks
    like this::
        { 'zen2' : { 'gcc' : [a], 'xlc' : [c] },
          'skylake' : { 'intel' : [b, d] }
        }

        { 'gcc' : [a],
          'intel' : [b,d],
          'xlc' : [c]
        }
    And ``index2`` is a single level dictionary of lists that looks
    like this::

    If any elemnts in funcs is a string, it is treated as the name
    of an attribute, and acts like getattr(object, name). So
    shorthand for the above two indexes would be::
        { 'gcc' : [a],
          'intel' : [b,d],
          'xlc' : [c]
        }

        index1 = index_by(list_of_specs, 'arch', 'compiler')
        index2 = index_by(list_of_specs, 'compiler')
    If any elements in funcs is a string, it is treated as the name
    of an attribute, and acts like getattr(object, name). So
    shorthand for the above two indexes would be::

    You can also index by tuples by passing tuples::
        index1 = index_by(list_of_specs, 'arch', 'compiler')
        index2 = index_by(list_of_specs, 'compiler')

        index1 = index_by(list_of_specs, ('arch', 'compiler'))
    You can also index by tuples by passing tuples::

    Keys in the resulting dict will look like ('gcc', 'bgqos_0').
        index1 = index_by(list_of_specs, ('target', 'compiler'))

    Keys in the resulting dict will look like ('gcc', 'skylake').
    """
    if not funcs:
        return objects

@@ -174,8 +174,9 @@ def _lock(self, op, timeout=None):
            # If the file were writable, we'd have opened it 'r+'
            raise LockROFileError(self.path)

        tty.debug("{0} locking [{1}:{2}]: timeout {3} sec"
                  .format(lock_type[op], self._start, self._length, timeout))
        self._log_debug("{0} locking [{1}:{2}]: timeout {3} sec"
                        .format(lock_type[op], self._start, self._length,
                                timeout))

        poll_intervals = iter(Lock._poll_interval_generator())
        start_time = time.time()

@@ -211,14 +212,14 @@ def _poll_lock(self, op):
            # help for debugging distributed locking
            if self.debug:
                # All locks read the owner PID and host
                self._read_debug_data()
                tty.debug('{0} locked {1} [{2}:{3}] (owner={4})'
                          .format(lock_type[op], self.path,
                                  self._start, self._length, self.pid))
                self._read_log_debug_data()
                self._log_debug('{0} locked {1} [{2}:{3}] (owner={4})'
                                .format(lock_type[op], self.path,
                                        self._start, self._length, self.pid))

                # Exclusive locks write their PID/host
                if op == fcntl.LOCK_EX:
                    self._write_debug_data()
                    self._write_log_debug_data()

            return True

@@ -245,7 +246,7 @@ def _ensure_parent_directory(self):
                raise
        return parent

    def _read_debug_data(self):
    def _read_log_debug_data(self):
        """Read PID and host data out of the file if it is there."""
        self.old_pid = self.pid
        self.old_host = self.host

@@ -257,7 +258,7 @@ def _read_debug_data(self):
        _, _, self.host = host.rpartition('=')
        self.pid = int(self.pid)

    def _write_debug_data(self):
    def _write_log_debug_data(self):
        """Write PID and host data to the file, recording old values."""
        self.old_pid = self.pid
        self.old_host = self.host

@@ -473,9 +474,6 @@ def release_write(self, release_fn=None):
        else:
            return False

    def _debug(self, *args):
        tty.debug(*args)

    def _get_counts_desc(self):
        return '(reads {0}, writes {1})'.format(self._reads, self._writes) \
            if tty.is_verbose() else ''

@@ -484,58 +482,50 @@ def _log_acquired(self, locktype, wait_time, nattempts):
        attempts_part = _attempts_str(wait_time, nattempts)
        now = datetime.now()
        desc = 'Acquired at %s' % now.strftime("%H:%M:%S.%f")
        self._debug(self._status_msg(locktype, '{0}{1}'.
                    format(desc, attempts_part)))
        self._log_debug(self._status_msg(locktype, '{0}{1}'
                        .format(desc, attempts_part)))

    def _log_acquiring(self, locktype):
        self._debug2(self._status_msg(locktype, 'Acquiring'))
        self._log_debug(self._status_msg(locktype, 'Acquiring'), level=3)

    def _log_debug(self, *args, **kwargs):
        """Output lock debug messages."""
        kwargs['level'] = kwargs.get('level', 2)
        tty.debug(*args, **kwargs)

    def _log_downgraded(self, wait_time, nattempts):
        attempts_part = _attempts_str(wait_time, nattempts)
        now = datetime.now()
        desc = 'Downgraded at %s' % now.strftime("%H:%M:%S.%f")
        self._debug(self._status_msg('READ LOCK', '{0}{1}'
                    .format(desc, attempts_part)))
        self._log_debug(self._status_msg('READ LOCK', '{0}{1}'
                        .format(desc, attempts_part)))

    def _log_downgrading(self):
        self._debug2(self._status_msg('WRITE LOCK', 'Downgrading'))
        self._log_debug(self._status_msg('WRITE LOCK', 'Downgrading'), level=3)

    def _log_released(self, locktype):
        now = datetime.now()
        desc = 'Released at %s' % now.strftime("%H:%M:%S.%f")
        self._debug(self._status_msg(locktype, desc))
        self._log_debug(self._status_msg(locktype, desc))

    def _log_releasing(self, locktype):
        self._debug2(self._status_msg(locktype, 'Releasing'))
        self._log_debug(self._status_msg(locktype, 'Releasing'), level=3)

    def _log_upgraded(self, wait_time, nattempts):
        attempts_part = _attempts_str(wait_time, nattempts)
        now = datetime.now()
        desc = 'Upgraded at %s' % now.strftime("%H:%M:%S.%f")
        self._debug(self._status_msg('WRITE LOCK', '{0}{1}'.
                    format(desc, attempts_part)))
        self._log_debug(self._status_msg('WRITE LOCK', '{0}{1}'.
                        format(desc, attempts_part)))

    def _log_upgrading(self):
        self._debug2(self._status_msg('READ LOCK', 'Upgrading'))
        self._log_debug(self._status_msg('READ LOCK', 'Upgrading'), level=3)

    def _status_msg(self, locktype, status):
        status_desc = '[{0}] {1}'.format(status, self._get_counts_desc())
        return '{0}{1.desc}: {1.path}[{1._start}:{1._length}] {2}'.format(
            locktype, self, status_desc)

    def _debug2(self, *args):
        # TODO: Easy place to make a single, temporary change to the
        # TODO: debug level associated with the more detailed messages.
        # TODO:
        # TODO: Someday it would be great if we could switch this to
        # TODO: another level, perhaps _between_ debug and verbose, or
        # TODO: some other form of filtering so the first level of
        # TODO: debugging doesn't have to generate these messages. Using
        # TODO: verbose here did not work as expected because tests like
        # TODO: test_spec_json will write the verbose messages to the
        # TODO: output that is used to check test correctness.
        tty.debug(*args)


class LockTransaction(object):
    """Simple nested transaction context manager that uses a file lock.

@@ -19,7 +19,8 @@

from llnl.util.tty.color import cprint, cwrite, cescape, clen

_debug = False
# Globals
_debug = 0
_verbose = False
_stacktrace = False
_timestamp = False

@@ -29,21 +30,26 @@
indent = "  "


def debug_level():
    return _debug


def is_verbose():
    return _verbose


def is_debug():
    return _debug
def is_debug(level=1):
    return _debug >= level


def is_stacktrace():
    return _stacktrace


def set_debug(flag):
def set_debug(level=0):
    global _debug
    _debug = flag
    assert level >= 0, 'Debug level must be a positive value'
    _debug = level


def set_verbose(flag):

@@ -132,12 +138,17 @@ def process_stacktrace(countback):
    return st_text


def show_pid():
    return is_debug(2)


def get_timestamp(force=False):
    """Get a string timestamp"""
    if _debug or _timestamp or force:
        # Note inclusion of the PID is useful for parallel builds.
        return '[{0}, {1}] '.format(
            datetime.now().strftime("%Y-%m-%d-%H:%M:%S.%f"), os.getpid())
        pid = ', {0}'.format(os.getpid()) if show_pid() else ''
        return '[{0}{1}] '.format(
            datetime.now().strftime("%Y-%m-%d-%H:%M:%S.%f"), pid)
    else:
        return ''

@@ -197,7 +208,8 @@ def verbose(message, *args, **kwargs):


def debug(message, *args, **kwargs):
    if _debug:
    level = kwargs.get('level', 1)
    if is_debug(level):
        kwargs.setdefault('format', 'g')
        kwargs.setdefault('stream', sys.stderr)
        info(message, *args, **kwargs)

@@ -21,6 +21,7 @@
from six import StringIO

import llnl.util.tty as tty
from llnl.util.lang import fork_context

try:
    import termios

@@ -323,14 +324,15 @@ class log_output(object):
    work within test frameworks like nose and pytest.
    """

    def __init__(self, file_like=None, echo=False, debug=False, buffer=False):
    def __init__(self, file_like=None, output=None, error=None,
                 echo=False, debug=0, buffer=False):
        """Create a new output log context manager.

        Args:
            file_like (str or stream): open file object or name of file where
                output should be logged
            echo (bool): whether to echo output in addition to logging it
            debug (bool): whether to enable tty debug mode during logging
            debug (int): positive to enable tty debug mode during logging
            buffer (bool): pass buffer=True to skip unbuffering output; note
                this doesn't set up any *new* buffering

@@ -348,14 +350,16 @@ def __init__(self, file_like=None, echo=False, debug=False, buffer=False):

        """
        self.file_like = file_like
        self.output = output or sys.stdout
        self.error = error or sys.stderr
        self.echo = echo
        self.debug = debug
        self.buffer = buffer

        self._active = False  # used to prevent re-entry

    def __call__(self, file_like=None, echo=None, debug=None, buffer=None):
        """Thie behaves the same as init. It allows a logger to be reused.
    def __call__(self, file_like=None, output=None, error=None,
                 echo=None, debug=None, buffer=None):
        """This behaves the same as init. It allows a logger to be reused.

        Arguments are the same as for ``__init__()``. Args here take
        precedence over those passed to ``__init__()``.

@@ -375,6 +379,10 @@ def __call__(self, file_like=None, echo=None, debug=None, buffer=None):
        """
        if file_like is not None:
            self.file_like = file_like
        if output is not None:
            self.output = output
        if error is not None:
            self.error = error
        if echo is not None:
            self.echo = echo
        if debug is not None:

@@ -430,11 +438,11 @@ def __enter__(self):
        except BaseException:
            input_stream = None  # just don't forward input if this fails

        self.process = multiprocessing.Process(
        self.process = fork_context.Process(
            target=_writer_daemon,
            args=(
                input_stream, read_fd, write_fd, self.echo, self.log_file,
                child_pipe
                input_stream, read_fd, write_fd, self.echo, self.output,
                self.log_file, child_pipe
            )
        )
        self.process.daemon = True  # must set before start()

@@ -447,43 +455,54 @@ def __enter__(self):

        # Flush immediately before redirecting so that anything buffered
        # goes to the original stream
        sys.stdout.flush()
        sys.stderr.flush()
        self.output.flush()
        self.error.flush()
        # sys.stdout.flush()
        # sys.stderr.flush()

        # Now do the actual output rediction.
        self.use_fds = _file_descriptors_work(sys.stdout, sys.stderr)
        self.use_fds = _file_descriptors_work(self.output, self.error)  # sys.stdout, sys.stderr

        if self.use_fds:
            # We try first to use OS-level file descriptors, as this
            # redirects output for subprocesses and system calls.

            # Save old stdout and stderr file descriptors
            self._saved_stdout = os.dup(sys.stdout.fileno())
            self._saved_stderr = os.dup(sys.stderr.fileno())
            self._saved_output = os.dup(self.output.fileno())
            self._saved_error = os.dup(self.error.fileno())
            # self._saved_stdout = os.dup(sys.stdout.fileno())
            # self._saved_stderr = os.dup(sys.stderr.fileno())

            # redirect to the pipe we created above
            os.dup2(write_fd, sys.stdout.fileno())
            os.dup2(write_fd, sys.stderr.fileno())
            os.dup2(write_fd, self.output.fileno())
            os.dup2(write_fd, self.error.fileno())
            # os.dup2(write_fd, sys.stdout.fileno())
            # os.dup2(write_fd, sys.stderr.fileno())
            os.close(write_fd)

        else:
            # Handle I/O the Python way. This won't redirect lower-level
            # output, but it's the best we can do, and the caller
            # shouldn't expect any better, since *they* have apparently
            # redirected I/O the Python way.

            # Save old stdout and stderr file objects
            self._saved_stdout = sys.stdout
            self._saved_stderr = sys.stderr
            self._saved_output = self.output
            self._saved_error = self.error
            # self._saved_stdout = sys.stdout
            # self._saved_stderr = sys.stderr

            # create a file object for the pipe; redirect to it.
            pipe_fd_out = os.fdopen(write_fd, 'w')
            sys.stdout = pipe_fd_out
            sys.stderr = pipe_fd_out
            self.output = pipe_fd_out
            self.error = pipe_fd_out
            # sys.stdout = pipe_fd_out
            # sys.stderr = pipe_fd_out

        # Unbuffer stdout and stderr at the Python level
        if not self.buffer:
            sys.stdout = Unbuffered(sys.stdout)
            sys.stderr = Unbuffered(sys.stderr)
            self.output = Unbuffered(self.output)
            self.error = Unbuffered(self.error)
            # sys.stdout = Unbuffered(sys.stdout)
            # sys.stderr = Unbuffered(sys.stderr)

        # Force color and debug settings now that we have redirected.
        tty.color.set_color_when(forced_color)

@@ -498,20 +517,29 @@ def __enter__(self):

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Flush any buffered output to the logger daemon.
        sys.stdout.flush()
        sys.stderr.flush()
        self.output.flush()
        self.error.flush()
        # sys.stdout.flush()
        # sys.stderr.flush()

        # restore previous output settings, either the low-level way or
        # the python way
        if self.use_fds:
            os.dup2(self._saved_stdout, sys.stdout.fileno())
            os.close(self._saved_stdout)
            os.dup2(self._saved_output, self.output.fileno())
            os.close(self._saved_output)

            os.dup2(self._saved_stderr, sys.stderr.fileno())
            os.close(self._saved_stderr)
            os.dup2(self._saved_error, self.error.fileno())
            os.close(self._saved_error)
            # os.dup2(self._saved_stdout, sys.stdout.fileno())
            # os.close(self._saved_stdout)

            # os.dup2(self._saved_stderr, sys.stderr.fileno())
            # os.close(self._saved_stderr)
        else:
            sys.stdout = self._saved_stdout
            sys.stderr = self._saved_stderr
            self.output = self._saved_output
            self.error = self._saved_error
            # sys.stdout = self._saved_stdout
            # sys.stderr = self._saved_stderr

        # print log contents in parent if needed.
        if self.write_log_in_parent:

@@ -545,16 +573,17 @@ def force_echo(self):
        # output. We us these control characters rather than, say, a
        # separate pipe, because they're in-band and assured to appear
        # exactly before and after the text we want to echo.
        sys.stdout.write(xon)
        sys.stdout.flush()
        self.output.write(xon)
        self.output.flush()
        try:
            yield
        finally:
            sys.stdout.write(xoff)
            sys.stdout.flush()
            self.output.write(xoff)
            self.output.flush()


def _writer_daemon(stdin, read_fd, write_fd, echo, log_file, control_pipe):
def _writer_daemon(stdin, read_fd, write_fd, echo, echo_stream, log_file,
                   control_pipe):
    """Daemon used by ``log_output`` to write to a log file and to ``stdout``.

    The daemon receives output from the parent process and writes it both

@@ -597,6 +626,7 @@ def _writer_daemon(stdin, read_fd, write_fd, echo, log_file, control_pipe):
            immediately closed by the writer daemon)
        echo (bool): initial echo setting -- controlled by user and
            preserved across multiple writer daemons
        echo_stream (stream): output to echo to when echoing
        log_file (file-like): file to log all output
        control_pipe (Pipe): multiprocessing pipe on which to send control
            information to the parent

@@ -651,8 +681,8 @@ def _writer_daemon(stdin, read_fd, write_fd, echo, log_file, control_pipe):

                # Echo to stdout if requested or forced.
                if echo or force_echo:
                    sys.stdout.write(line)
                    sys.stdout.flush()
                    echo_stream.write(line)
                    echo_stream.flush()

                # Stripped output to log file.
                log_file.write(_strip(line))

@@ -24,6 +24,7 @@
import traceback

import llnl.util.tty.log as log
from llnl.util.lang import fork_context

from spack.util.executable import which

@@ -31,17 +32,17 @@
class ProcessController(object):
    """Wrapper around some fundamental process control operations.

    This allows one process to drive another similar to the way a shell
    would, by sending signals and I/O.
    This allows one process (the controller) to drive another (the
    minion) similar to the way a shell would, by sending signals and I/O.

    """
    def __init__(self, pid, master_fd,
    def __init__(self, pid, controller_fd,
                 timeout=1, sleep_time=1e-1, debug=False):
        """Create a controller to manipulate the process with id ``pid``

        Args:
            pid (int): id of process to control
            master_fd (int): master file descriptor attached to pid's stdin
            controller_fd (int): controller fd attached to pid's stdin
            timeout (int): time in seconds for wait operations to time out
                (default 1 second)
            sleep_time (int): time to sleep after signals, to control the

@@ -58,7 +59,7 @@ def __init__(self, pid, master_fd,
        """
        self.pid = pid
        self.pgid = os.getpgid(pid)
        self.master_fd = master_fd
        self.controller_fd = controller_fd
        self.timeout = timeout
        self.sleep_time = sleep_time
        self.debug = debug

@@ -67,8 +68,8 @@ def __init__(self, pid, master_fd,
        self.ps = which("ps", required=True)

    def get_canon_echo_attrs(self):
        """Get echo and canon attributes of the terminal of master_fd."""
        cfg = termios.tcgetattr(self.master_fd)
        """Get echo and canon attributes of the terminal of controller_fd."""
        cfg = termios.tcgetattr(self.controller_fd)
        return (
            bool(cfg[3] & termios.ICANON),
            bool(cfg[3] & termios.ECHO),

@@ -82,7 +83,7 @@ def horizontal_line(self, name):
        )

    def status(self):
        """Print debug message with status info for the child."""
        """Print debug message with status info for the minion."""
        if self.debug:
            canon, echo = self.get_canon_echo_attrs()
            sys.stderr.write("canon: %s, echo: %s\n" % (

@@ -94,12 +95,12 @@ def status(self):
            sys.stderr.write("\n")

    def input_on(self):
        """True if keyboard input is enabled on the master_fd pty."""
        """True if keyboard input is enabled on the controller_fd pty."""
        return self.get_canon_echo_attrs() == (False, False)

    def background(self):
        """True if pgid is in a background pgroup of master_fd's terminal."""
        return self.pgid != os.tcgetpgrp(self.master_fd)
        """True if pgid is in a background pgroup of controller_fd's tty."""
        return self.pgid != os.tcgetpgrp(self.controller_fd)

    def tstp(self):
        """Send SIGTSTP to the controlled process."""

@@ -115,18 +116,18 @@ def cont(self):
    def fg(self):
        self.horizontal_line("fg")
        with log.ignore_signal(signal.SIGTTOU):
            os.tcsetpgrp(self.master_fd, os.getpgid(self.pid))
            os.tcsetpgrp(self.controller_fd, os.getpgid(self.pid))
        time.sleep(self.sleep_time)

    def bg(self):
        self.horizontal_line("bg")
        with log.ignore_signal(signal.SIGTTOU):
            os.tcsetpgrp(self.master_fd, os.getpgrp())
            os.tcsetpgrp(self.controller_fd, os.getpgrp())
        time.sleep(self.sleep_time)

    def write(self, byte_string):
        self.horizontal_line("write '%s'" % byte_string.decode("utf-8"))
        os.write(self.master_fd, byte_string)
        os.write(self.controller_fd, byte_string)

    def wait(self, condition):
        start = time.time()

@@ -156,50 +157,51 @@ def wait_running(self):


class PseudoShell(object):
    """Sets up master and child processes with a PTY.
    """Sets up controller and minion processes with a PTY.

    You can create a ``PseudoShell`` if you want to test how some
    function responds to terminal input. This is a pseudo-shell from a
    job control perspective; ``master_function`` and ``child_function``
    are set up with a pseudoterminal (pty) so that the master can drive
    the child through process control signals and I/O.
    job control perspective; ``controller_function`` and ``minion_function``
    are set up with a pseudoterminal (pty) so that the controller can drive
    the minion through process control signals and I/O.

    The two functions should have signatures like this::

        def master_function(proc, ctl, **kwargs)
        def child_function(**kwargs)
        def controller_function(proc, ctl, **kwargs)
        def minion_function(**kwargs)

    ``master_function`` is spawned in its own process and passed three
    ``controller_function`` is spawned in its own process and passed three
    arguments:

    proc
        the ``multiprocessing.Process`` object representing the child
        the ``multiprocessing.Process`` object representing the minion
    ctl
        a ``ProcessController`` object tied to the child
        a ``ProcessController`` object tied to the minion
    kwargs
        keyword arguments passed from ``PseudoShell.start()``.

    ``child_function`` is only passed ``kwargs`` delegated from
    ``minion_function`` is only passed ``kwargs`` delegated from
    ``PseudoShell.start()``.

    The ``ctl.master_fd`` will have its ``master_fd`` connected to
    ``sys.stdin`` in the child process. Both processes will share the
    The ``ctl.controller_fd`` will have its ``controller_fd`` connected to
    ``sys.stdin`` in the minion process. Both processes will share the
    same ``sys.stdout`` and ``sys.stderr`` as the process instantiating
    ``PseudoShell``.

    Here are the relationships between processes created::

        ._________________________________________________________.
        | Child Process                                           | pid 2
        | - runs child_function                                   | pgroup 2
        | Minion Process                                          | pid 2
        | - runs minion_function                                  | pgroup 2
        |_________________________________________________________| session 1
            ^
            | create process with master_fd connected to stdin
            | create process with controller_fd connected to stdin
            | stdout, stderr are the same as caller
        ._________________________________________________________.
        | Master Process                                          | pid 1
        | - runs master_function                                  | pgroup 1
        | - uses ProcessController and master_fd to control child | session 1
        | Controller Process                                      | pid 1
        | - runs controller_function                              | pgroup 1
        | - uses ProcessController and controller_fd to           | session 1
        |   control minion                                        |
        |_________________________________________________________|
            ^
            | create process

@@ -207,51 +209,51 @@ def child_function(**kwargs)
        ._________________________________________________________.
        | Caller                                                  | pid 0
        | - Constructs, starts, joins PseudoShell                 | pgroup 0
        | - provides master_function, child_function              | session 0
        | - provides controller_function, minion_function         | session 0
        |_________________________________________________________|

    """
    def __init__(self, master_function, child_function):
    def __init__(self, controller_function, minion_function):
        self.proc = None
        self.master_function = master_function
        self.child_function = child_function
        self.controller_function = controller_function
        self.minion_function = minion_function

        # these can be optionally set to change defaults
        self.controller_timeout = 1
        self.sleep_time = 0

    def start(self, **kwargs):
        """Start the master and child processes.
        """Start the controller and minion processes.

        Arguments:
            kwargs (dict): arbitrary keyword arguments that will be
                passed to master and child functions
                passed to controller and minion functions

        The master process will create the child, then call
        ``master_function``. The child process will call
        ``child_function``.
        The controller process will create the minion, then call
        ``controller_function``. The minion process will call
        ``minion_function``.

        """
        self.proc = multiprocessing.Process(
            target=PseudoShell._set_up_and_run_master_function,
            args=(self.master_function, self.child_function,
        self.proc = fork_context.Process(
            target=PseudoShell._set_up_and_run_controller_function,
            args=(self.controller_function, self.minion_function,
                  self.controller_timeout, self.sleep_time),
            kwargs=kwargs,
        )
        self.proc.start()

    def join(self):
        """Wait for the child process to finish, and return its exit code."""
        """Wait for the minion process to finish, and return its exit code."""
        self.proc.join()
        return self.proc.exitcode

    @staticmethod
    def _set_up_and_run_child_function(
            tty_name, stdout_fd, stderr_fd, ready, child_function, **kwargs):
        """Child process wrapper for PseudoShell.
    def _set_up_and_run_minion_function(
            tty_name, stdout_fd, stderr_fd, ready, minion_function, **kwargs):
        """Minion process wrapper for PseudoShell.

        Handles the mechanics of setting up a PTY, then calls
        ``child_function``.
        ``minion_function``.

        """
        # new process group, like a command or pipeline launched by a shell

@@ -266,45 +268,45 @@ def _set_up_and_run_child_function(

        if kwargs.get("debug"):
            sys.stderr.write(
                "child: stdin.isatty(): %s\n" % sys.stdin.isatty())
                "minion: stdin.isatty(): %s\n" % sys.stdin.isatty())

        # tell the parent that we're really running
        if kwargs.get("debug"):
            sys.stderr.write("child: ready!\n")
            sys.stderr.write("minion: ready!\n")
        ready.value = True

        try:
            child_function(**kwargs)
            minion_function(**kwargs)
        except BaseException:
            traceback.print_exc()

    @staticmethod
    def _set_up_and_run_master_function(
            master_function, child_function, controller_timeout, sleep_time,
            **kwargs):
        """Set up a pty, spawn a child process, and execute master_function.
    def _set_up_and_run_controller_function(
            controller_function, minion_function, controller_timeout,
            sleep_time, **kwargs):
        """Set up a pty, spawn a minion process, execute controller_function.

        Handles the mechanics of setting up a PTY, then calls
        ``master_function``.
        ``controller_function``.

        """
        os.setsid()  # new session; this process is the controller

        master_fd, child_fd = os.openpty()
        pty_name = os.ttyname(child_fd)
        controller_fd, minion_fd = os.openpty()
        pty_name = os.ttyname(minion_fd)

        # take controlling terminal
        pty_fd = os.open(pty_name, os.O_RDWR)
        os.close(pty_fd)

        ready = multiprocessing.Value('i', False)
        child_process = multiprocessing.Process(
            target=PseudoShell._set_up_and_run_child_function,
        minion_process = multiprocessing.Process(
            target=PseudoShell._set_up_and_run_minion_function,
            args=(pty_name, sys.stdout.fileno(), sys.stderr.fileno(),
                  ready, child_function),
                  ready, minion_function),
            kwargs=kwargs,
        )
        child_process.start()
        minion_process.start()

        # wait for subprocess to be running and connected.
        while not ready.value:

@@ -315,30 +317,31 @@ def _set_up_and_run_master_function(
            sys.stderr.write("pid: %d\n" % os.getpid())
            sys.stderr.write("pgid: %d\n" % os.getpgrp())
            sys.stderr.write("sid: %d\n" % os.getsid(0))
            sys.stderr.write("tcgetpgrp: %d\n" % os.tcgetpgrp(master_fd))
            sys.stderr.write("tcgetpgrp: %d\n" % os.tcgetpgrp(controller_fd))
            sys.stderr.write("\n")

            child_pgid = os.getpgid(child_process.pid)
            sys.stderr.write("child pid: %d\n" % child_process.pid)
            sys.stderr.write("child pgid: %d\n" % child_pgid)
            sys.stderr.write("child sid: %d\n" % os.getsid(child_process.pid))
            minion_pgid = os.getpgid(minion_process.pid)
            sys.stderr.write("minion pid: %d\n" % minion_process.pid)
            sys.stderr.write("minion pgid: %d\n" % minion_pgid)
            sys.stderr.write(
                "minion sid: %d\n" % os.getsid(minion_process.pid))
            sys.stderr.write("\n")
            sys.stderr.flush()

        # set up master to ignore SIGTSTP, like a shell
        # set up controller to ignore SIGTSTP, like a shell
        signal.signal(signal.SIGTSTP, signal.SIG_IGN)

        # call the master function once the child is ready
        # call the controller function once the minion is ready
        try:
            controller = ProcessController(
                child_process.pid, master_fd, debug=kwargs.get("debug"))
                minion_process.pid, controller_fd, debug=kwargs.get("debug"))
            controller.timeout = controller_timeout
            controller.sleep_time = sleep_time
            error = master_function(child_process, controller, **kwargs)
            error = controller_function(minion_process, controller, **kwargs)
        except BaseException:
            error = 1
            traceback.print_exc()

        child_process.join()
        minion_process.join()
|
||||
|
||||
# return whether either the parent or child failed
|
||||
return error or child_process.exitcode
|
||||
# return whether either the parent or minion failed
|
||||
return error or minion_process.exitcode
|
||||
|
||||
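The controller/minion split keeps the shape of the old master/child API: the caller supplies two functions and PseudoShell wires them to opposite ends of a pty. A minimal sketch of driving the renamed API (the import path matches where PseudoShell lives in the Spack tree; the function bodies are illustrative only):

    from llnl.util.tty.pty import PseudoShell

    def controller_function(minion_proc, controller, **kwargs):
        # controller is a ProcessController wrapping the minion's pty;
        # return a nonzero value to signal failure
        return 0

    def minion_function(**kwargs):
        # runs in its own session, with the pty as controlling terminal
        print("running under a pty")

    shell = PseudoShell(controller_function, minion_function)
    shell.start(debug=False)
    exit_code = shell.join()  # controller error, or the minion's exit code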
lib/spack/spack/__init__.py
@@ -5,7 +5,7 @@


 #: major, minor, patch version for Spack, in a tuple
-spack_version_info = (0, 15, 1)
+spack_version_info = (0, 15, 4)

 #: String containing Spack version joined with .'s
 spack_version = '.'.join(str(v) for v in spack_version_info)
lib/spack/spack/abi.py
@@ -18,10 +18,13 @@ class ABI(object):
 """This class provides methods to test ABI compatibility between specs.
    The current implementation is rather rough and could be improved."""

-    def architecture_compatible(self, parent, child):
-        """Return true if parent and child have ABI compatible targets."""
-        return not parent.architecture or not child.architecture or \
-            parent.architecture == child.architecture
+    def architecture_compatible(self, target, constraint):
+        """Return true if architecture of target spec is ABI compatible
+        to the architecture of constraint spec. If either the target
+        or constraint specs have no architecture, target is also defined
+        as architecture ABI compatible to constraint."""
+        return not target.architecture or not constraint.architecture or \
+            target.architecture.satisfies(constraint.architecture)

     @memoized
     def _gcc_get_libstdcxx_version(self, version):
@@ -107,8 +110,8 @@ def compiler_compatible(self, parent, child, **kwargs):
                 return True
         return False

-    def compatible(self, parent, child, **kwargs):
-        """Returns true iff a parent and child spec are ABI compatible"""
+    def compatible(self, target, constraint, **kwargs):
+        """Returns true if target spec is ABI compatible to constraint spec"""
         loosematch = kwargs.get('loose', False)
-        return self.architecture_compatible(parent, child) and \
-            self.compiler_compatible(parent, child, loose=loosematch)
+        return self.architecture_compatible(target, constraint) and \
+            self.compiler_compatible(target, constraint, loose=loosematch)
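Under the new naming, compatibility is directional: the target spec is checked against the constraint spec, and the target's architecture only has to satisfy the constraint's rather than equal it. A short sketch of the new call convention (the spec strings are illustrative; in practice these come from concretization):

    import spack.spec
    from spack.abi import ABI

    target = spack.spec.Spec('zlib target=x86_64')
    constraint = spack.spec.Spec('zlib target=x86_64')

    # checks architecture compatibility, then compiler compatibility
    ok = ABI().compatible(target, constraint, loose=True)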
lib/spack/spack/architecture.py
@@ -6,7 +6,7 @@
 """
 This module contains all the elements that are required to create an
 architecture object. These include, the target processor, the operating system,
-and the architecture platform (i.e. cray, darwin, linux, bgq, etc) classes.
+and the architecture platform (i.e. cray, darwin, linux, etc) classes.

 On a multiple architecture machine, the architecture spec field can be set to
 build a package against any target and operating system that is present on the
@@ -217,7 +217,7 @@ def optimization_flags(self, compiler):
         if isinstance(compiler, spack.spec.CompilerSpec):
             compiler = spack.compilers.compilers_for_spec(compiler).pop()
         try:
-            compiler_version = compiler.get_real_version()
+            compiler_version = compiler.real_version
         except spack.util.executable.ProcessError as e:
             # log this and just return compiler.version instead
             tty.debug(str(e))
@@ -233,10 +233,14 @@ class Platform(object):
        Will return a instance of it once it is returned.
     """

-    priority = None  # Subclass sets number. Controls detection order
+    priority = None  # Subclass sets number. Controls detection order
+
+    #: binary formats used on this platform; used by relocation logic
+    binary_formats = ['elf']

     front_end = None
     back_end = None
-    default = None  # The default back end target. On cray ivybridge
+    default = None  # The default back end target. On cray ivybridge

     front_os = None
     back_os = None
@@ -436,6 +440,12 @@ def to_dict(self):
                          ('target', self.target.to_dict_or_value())])
         return syaml_dict([('arch', d)])

+    def to_spec(self):
+        """Convert this Arch to an anonymous Spec with architecture defined."""
+        spec = spack.spec.Spec()
+        spec.architecture = spack.spec.ArchSpec(str(self))
+        return spec
+
     @staticmethod
     def from_dict(d):
         spec = spack.spec.ArchSpec.from_dict(d)
@@ -518,6 +528,14 @@ def platform():


+@memoized
+def default_arch():
+    """Default ``Arch`` object for this machine.
+
+    See ``sys_type()``.
+    """
+    return Arch(platform(), 'default_os', 'default_target')
+
+
 def sys_type():
     """Print out the "default" platform-os-target tuple for this machine.

@@ -530,8 +548,7 @@ def sys_type():
    architectures.

     """
-    arch = Arch(platform(), 'default_os', 'default_target')
-    return str(arch)
+    return str(default_arch())


 @memoized
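Taken together, the new helpers give a cached default architecture object and a way to express it as an anonymous spec constraint. A short sketch using only the functions added above:

    import spack.architecture as architecture

    arch = architecture.default_arch()   # memoized Arch for this machine
    print(architecture.sys_type())       # same value, as a string

    # new in this diff: wrap the Arch in an anonymous Spec
    anon = arch.to_spec()
    assert anon.architecture is not None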
lib/spack/spack/binary_distribution.py
@@ -11,7 +11,6 @@
 import tempfile
 import hashlib
 import glob
-import platform

 from contextlib import closing
 import ruamel.yaml as yaml
@@ -36,7 +35,6 @@
 from spack.spec import Spec
 from spack.stage import Stage
 from spack.util.gpg import Gpg
-import spack.architecture as architecture

 _build_cache_relative_path = 'build_cache'

@@ -466,8 +464,8 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
         web_util.push_to_url(
             specfile_path, remote_specfile_path, keep_original=False)

-    tty.msg('Buildache for "%s" written to \n %s' %
-            (spec, remote_spackfile_path))
+    tty.debug('Buildcache for "{0}" written to \n {1}'
+              .format(spec, remote_spackfile_path))

     try:
         # create an index.html for the build_cache directory so specs can be
@@ -498,6 +496,7 @@ def download_tarball(spec):

     # stage the tarball into standard place
     stage = Stage(url, name="build_cache", keep=True)
+    stage.create()
     try:
         stage.fetch()
         return stage.save_filename
@@ -520,16 +519,16 @@ def make_package_relative(workdir, spec, allow_root):
     for filename in buildinfo['relocate_binaries']:
         orig_path_names.append(os.path.join(prefix, filename))
         cur_path_names.append(os.path.join(workdir, filename))
-    if (spec.architecture.platform == 'darwin' or
-        spec.architecture.platform == 'test' and
-            platform.system().lower() == 'darwin'):
-        relocate.make_macho_binaries_relative(cur_path_names, orig_path_names,
-                                              old_layout_root)
-    if (spec.architecture.platform == 'linux' or
-        spec.architecture.platform == 'test' and
-            platform.system().lower() == 'linux'):
-        relocate.make_elf_binaries_relative(cur_path_names, orig_path_names,
-                                            old_layout_root)
+
+    platform = spack.architecture.get_platform(spec.platform)
+    if 'macho' in platform.binary_formats:
+        relocate.make_macho_binaries_relative(
+            cur_path_names, orig_path_names, old_layout_root)
+
+    if 'elf' in platform.binary_formats:
+        relocate.make_elf_binaries_relative(
+            cur_path_names, orig_path_names, old_layout_root)

     relocate.raise_if_not_relocatable(cur_path_names, allow_root)
     orig_path_names = list()
     cur_path_names = list()
@@ -602,29 +601,23 @@ def is_backup_file(file):
             if not is_backup_file(text_name):
                 text_names.append(text_name)

-    # If we are installing back to the same location don't replace anything
+    # If we are not installing back to the same install tree do the relocation
     if old_layout_root != new_layout_root:
-        paths_to_relocate = [old_spack_prefix, old_layout_root]
-        paths_to_relocate.extend(prefix_to_hash.keys())
-        files_to_relocate = list(filter(
-            lambda pathname: not relocate.file_is_relocatable(
-                pathname, paths_to_relocate=paths_to_relocate),
-            map(lambda filename: os.path.join(workdir, filename),
-                buildinfo['relocate_binaries'])))
+        files_to_relocate = [os.path.join(workdir, filename)
+                             for filename in buildinfo.get('relocate_binaries')
+                             ]
         # If the buildcache was not created with relativized rpaths
         # do the relocation of path in binaries
-        if (spec.architecture.platform == 'darwin' or
-            spec.architecture.platform == 'test' and
-                platform.system().lower() == 'darwin'):
+        platform = spack.architecture.get_platform(spec.platform)
+        if 'macho' in platform.binary_formats:
             relocate.relocate_macho_binaries(files_to_relocate,
                                              old_layout_root,
                                              new_layout_root,
                                              prefix_to_prefix, rel,
                                              old_prefix,
                                              new_prefix)
-        if (spec.architecture.platform == 'linux' or
-            spec.architecture.platform == 'test' and
-                platform.system().lower() == 'linux'):
+
+        if 'elf' in platform.binary_formats:
             relocate.relocate_elf_binaries(files_to_relocate,
                                            old_layout_root,
                                            new_layout_root,
@@ -646,6 +639,13 @@ def is_backup_file(file):
                                old_spack_prefix,
                                new_spack_prefix,
                                prefix_to_prefix)

+        paths_to_relocate = [old_prefix, old_layout_root]
+        paths_to_relocate.extend(prefix_to_hash.keys())
+        files_to_relocate = list(filter(
+            lambda pathname: not relocate.file_is_relocatable(
+                pathname, paths_to_relocate=paths_to_relocate),
+            map(lambda filename: os.path.join(workdir, filename),
+                buildinfo['relocate_binaries'])))
         # relocate the install prefixes in binary files including dependencies
         relocate.relocate_text_bin(files_to_relocate,
                                    old_prefix, new_prefix,
@@ -653,6 +653,17 @@ def is_backup_file(file):
                                    new_spack_prefix,
                                    prefix_to_prefix)

+    # If we are installing back to the same location
+    # relocate the sbang location if the spack directory changed
+    else:
+        if old_spack_prefix != new_spack_prefix:
+            relocate.relocate_text(text_names,
+                                   old_layout_root, new_layout_root,
+                                   old_prefix, new_prefix,
+                                   old_spack_prefix,
+                                   new_spack_prefix,
+                                   prefix_to_prefix)


 def extract_tarball(spec, filename, allow_root=False, unsigned=False,
                     force=False):
@@ -828,26 +839,24 @@ def get_spec(spec=None, force=False):

         mirror_dir = url_util.local_file_path(fetch_url_build_cache)
         if mirror_dir:
-            tty.msg("Finding buildcaches in %s" % mirror_dir)
+            tty.debug('Finding buildcaches in {0}'.format(mirror_dir))
             link = url_util.join(fetch_url_build_cache, specfile_name)
             urls.add(link)

         else:
-            tty.msg("Finding buildcaches at %s" %
-                    url_util.format(fetch_url_build_cache))
+            tty.debug('Finding buildcaches at {0}'
+                      .format(url_util.format(fetch_url_build_cache)))
             link = url_util.join(fetch_url_build_cache, specfile_name)
             urls.add(link)

     return try_download_specs(urls=urls, force=force)


-def get_specs(allarch=False):
+def get_specs():
     """
     Get spec.yaml's for build caches available on mirror
     """
     global _cached_specs
-    arch = architecture.Arch(architecture.platform(),
-                             'default_os', 'default_target')

     if not spack.mirror.MirrorCollection():
         tty.debug("No Spack mirrors are currently configured")
@@ -857,8 +866,8 @@ def get_specs(allarch=False):
         fetch_url_build_cache = url_util.join(
             mirror.fetch_url, _build_cache_relative_path)

-        tty.msg("Finding buildcaches at %s" %
-                url_util.format(fetch_url_build_cache))
+        tty.debug('Finding buildcaches at {0}'
+                  .format(url_util.format(fetch_url_build_cache)))

         index_url = url_util.join(fetch_url_build_cache, 'index.json')

@@ -867,10 +876,9 @@ def get_specs(allarch=False):
                 index_url, 'application/json')
             index_object = codecs.getreader('utf-8')(file_stream).read()
         except (URLError, web_util.SpackWebError) as url_err:
-            tty.error('Failed to read index {0}'.format(index_url))
-            tty.debug(url_err)
-            # Just return whatever specs we may already have cached
-            return _cached_specs
+            tty.debug('Failed to read index {0}'.format(index_url), url_err, 1)
+            # Continue on to the next mirror
+            continue

         tmpdir = tempfile.mkdtemp()
         index_file_path = os.path.join(tmpdir, 'index.json')
@@ -885,9 +893,7 @@ def get_specs(allarch=False):
             spec_list = db.query_local(installed=False)

             for indexed_spec in spec_list:
-                spec_arch = architecture.arch_for_spec(indexed_spec.architecture)
-                if (allarch is True or spec_arch == arch):
-                    _cached_specs.add(indexed_spec)
+                _cached_specs.add(indexed_spec)

     return _cached_specs

@@ -909,15 +915,15 @@ def get_keys(install=False, trust=False, force=False):

         mirror_dir = url_util.local_file_path(fetch_url_build_cache)
         if mirror_dir:
-            tty.msg("Finding public keys in %s" % mirror_dir)
+            tty.debug('Finding public keys in {0}'.format(mirror_dir))
             files = os.listdir(str(mirror_dir))
             for file in files:
                 if re.search(r'\.key', file) or re.search(r'\.pub', file):
                     link = url_util.join(fetch_url_build_cache, file)
                     keys.add(link)
         else:
-            tty.msg("Finding public keys at %s" %
-                    url_util.format(fetch_url_build_cache))
+            tty.debug('Finding public keys at {0}'
+                      .format(url_util.format(fetch_url_build_cache)))
             # For s3 mirror need to request index.html directly
             p, links = web_util.spider(
                 url_util.join(fetch_url_build_cache, 'index.html'))
@@ -935,14 +941,14 @@ def get_keys(install=False, trust=False, force=False):
                 stage.fetch()
             except fs.FetchError:
                 continue
-        tty.msg('Found key %s' % link)
+        tty.debug('Found key {0}'.format(link))
         if install:
             if trust:
                 Gpg.trust(stage.save_filename)
-                tty.msg('Added this key to trusted keys.')
+                tty.debug('Added this key to trusted keys.')
             else:
-                tty.msg('Will not add this key to trusted keys.'
-                        'Use -t to install all downloaded keys')
+                tty.debug('Will not add this key to trusted keys.'
+                          'Use -t to install all downloaded keys')


 def needs_rebuild(spec, mirror_url, rebuild_on_errors=False):
@@ -1029,7 +1035,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
     """
     rebuilds = {}
     for mirror in spack.mirror.MirrorCollection(mirrors).values():
-        tty.msg('Checking for built specs at %s' % mirror.fetch_url)
+        tty.debug('Checking for built specs at {0}'.format(mirror.fetch_url))

         rebuild_list = []
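The relocation paths above now dispatch on a platform's declared binary formats instead of hard-coded platform names. A minimal sketch of the lookup the new code relies on (the helper name is ours; the calls are taken from the diff):

    import spack.architecture

    def relocation_formats(spec):
        # e.g. ['elf'] on Linux platforms, or whatever formats a
        # Platform subclass declares in its binary_formats attribute
        platform = spack.architecture.get_platform(spec.platform)
        return platform.binary_formats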
lib/spack/spack/build_environment.py
@@ -33,7 +33,6 @@
 calls you can make from within the install() function.
 """
 import re
 import inspect
-import multiprocessing
 import os
 import shutil
@@ -45,16 +44,19 @@
 import llnl.util.tty as tty
 from llnl.util.tty.color import cescape, colorize
 from llnl.util.filesystem import mkdirp, install, install_tree
-from llnl.util.lang import dedupe
+from llnl.util.lang import dedupe, fork_context

 import spack.build_systems.cmake
 import spack.build_systems.meson
 import spack.config
 import spack.main
 import spack.paths
 import spack.package
 import spack.schema.environment
 import spack.store
+import spack.install_test
 import spack.architecture as arch
 import spack.util.path
 from spack.util.string import plural
 from spack.util.environment import (
     env_flag, filter_system_paths, get_path, is_system_path,
@@ -62,7 +64,7 @@
 from spack.util.environment import system_dirs
 from spack.error import NoLibrariesError, NoHeadersError
 from spack.util.executable import Executable
-from spack.util.module_cmd import load_module, get_path_from_module, module
+from spack.util.module_cmd import load_module, path_from_modules, module
 from spack.util.log_parse import parse_log_events, make_log_context


@@ -174,6 +176,14 @@ def clean_environment():
     for v in build_system_vars:
         env.unset(v)

+    # Unset mpi environment vars. These flags should only be set by
+    # mpi providers for packages with mpi dependencies
+    mpi_vars = [
+        'MPICC', 'MPICXX', 'MPIFC', 'MPIF77', 'MPIF90'
+    ]
+    for v in mpi_vars:
+        env.unset(v)
+
     build_lang = spack.config.get('config:build_language')
     if build_lang:
         # Override language-related variables. This can be used to force
@@ -443,7 +453,6 @@ def _set_variables_for_single_module(pkg, module):

     jobs = spack.config.get('config:build_jobs', 16) if pkg.parallel else 1
-    jobs = min(jobs, multiprocessing.cpu_count())
     assert jobs is not None, "no default set for config:build_jobs"

     m = module
     m.make_jobs = jobs
@@ -634,7 +643,7 @@ def get_rpaths(pkg):
     # Second module is our compiler mod name. We use that to get rpaths from
     # module show output.
     if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
-        rpaths.append(get_path_from_module(pkg.compiler.modules[1]))
+        rpaths.append(path_from_modules([pkg.compiler.modules[1]]))
     return list(dedupe(filter_system_paths(rpaths)))


@@ -698,32 +707,48 @@ def load_external_modules(pkg):
         pkg (PackageBase): package to load deps for
     """
     for dep in list(pkg.spec.traverse()):
-        if dep.external_module:
-            load_module(dep.external_module)
+        external_modules = dep.external_modules or []
+        for external_module in external_modules:
+            load_module(external_module)


-def setup_package(pkg, dirty):
+def setup_package(pkg, dirty, context='build'):
     """Execute all environment setup routines."""
-    build_env = EnvironmentModifications()
+    env = EnvironmentModifications()

+    # clean environment
     if not dirty:
         clean_environment()

-    set_compiler_environment_variables(pkg, build_env)
-    set_build_environment_variables(pkg, build_env, dirty)
-    pkg.architecture.platform.setup_platform_environment(pkg, build_env)
+    # setup compilers and build tools for build contexts
+    need_compiler = context == 'build' or (context == 'test' and
+                                           pkg.test_requires_compiler)
+    if need_compiler:
+        set_compiler_environment_variables(pkg, env)
+        set_build_environment_variables(pkg, env, dirty)

-    build_env.extend(
-        modifications_from_dependencies(pkg.spec, context='build')
-    )
+    # architecture specific setup
+    pkg.architecture.platform.setup_platform_environment(pkg, env)

-    if (not dirty) and (not build_env.is_unset('CPATH')):
-        tty.debug("A dependency has updated CPATH, this may lead pkg-config"
-                  " to assume that the package is part of the system"
-                  " includes and omit it when invoked with '--cflags'.")
+    if context == 'build':
+        # recursive post-order dependency information
+        env.extend(
+            modifications_from_dependencies(pkg.spec, context=context)
+        )

-    set_module_variables_for_package(pkg)
-    pkg.setup_build_environment(build_env)
+        if (not dirty) and (not env.is_unset('CPATH')):
+            tty.debug("A dependency has updated CPATH, this may lead pkg-"
+                      "config to assume that the package is part of the system"
+                      " includes and omit it when invoked with '--cflags'.")

+        # setup package itself
+        set_module_variables_for_package(pkg)
+        pkg.setup_build_environment(env)
+    elif context == 'test':
+        import spack.user_environment as uenv  # avoid circular import
+        env.extend(uenv.environment_modifications_for_spec(pkg.spec))
+        set_module_variables_for_package(pkg)
+        env.prepend_path('PATH', '.')

     # Loading modules, in particular if they are meant to be used outside
     # of Spack, can change environment variables that are relevant to the
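setup_package now branches on a context argument instead of always preparing a build. A sketch of both call shapes (pkg is assumed to be an already-concretized package object):

    import spack.build_environment as build_environment

    def prepare(pkg, for_tests=False):
        if for_tests:
            # run-style environment for the installed spec; compilers are
            # set up only when pkg.test_requires_compiler is set
            build_environment.setup_package(pkg, dirty=False, context='test')
        else:
            # default build context: compiler wrappers plus dependency
            # modifications, as before
            build_environment.setup_package(pkg, dirty=False)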
@@ -733,15 +758,16 @@ def setup_package(pkg, dirty):
     # unnecessary. Modules affecting these variables will be overwritten anyway
     with preserve_environment('CC', 'CXX', 'FC', 'F77'):
         # All module loads that otherwise would belong in previous
-        # functions have to occur after the build_env object has its
+        # functions have to occur after the env object has its
         # modifications applied. Otherwise the environment modifications
         # could undo module changes, such as unsetting LD_LIBRARY_PATH
         # after a module changes it.
-        for mod in pkg.compiler.modules:
-            # Fixes issue https://github.com/spack/spack/issues/3153
-            if os.environ.get("CRAY_CPU_TARGET") == "mic-knl":
-                load_module("cce")
-            load_module(mod)
+        if need_compiler:
+            for mod in pkg.compiler.modules:
+                # Fixes issue https://github.com/spack/spack/issues/3153
+                if os.environ.get("CRAY_CPU_TARGET") == "mic-knl":
+                    load_module("cce")
+                load_module(mod)

         # kludge to handle cray libsci being automatically loaded by PrgEnv
         # modules on cray platform. Module unload does no damage when
@@ -755,12 +781,12 @@ def setup_package(pkg, dirty):

         implicit_rpaths = pkg.compiler.implicit_rpaths()
         if implicit_rpaths:
-            build_env.set('SPACK_COMPILER_IMPLICIT_RPATHS',
-                          ':'.join(implicit_rpaths))
+            env.set('SPACK_COMPILER_IMPLICIT_RPATHS',
+                    ':'.join(implicit_rpaths))

     # Make sure nothing's strange about the Spack environment.
-    validate(build_env, tty.warn)
-    build_env.apply_modifications()
+    validate(env, tty.warn)
+    env.apply_modifications()


 def modifications_from_dependencies(spec, context):
@@ -780,7 +806,8 @@ def modifications_from_dependencies(spec, context):
     deptype_and_method = {
         'build': (('build', 'link', 'test'),
                   'setup_dependent_build_environment'),
-        'run': (('link', 'run'), 'setup_dependent_run_environment')
+        'run': (('link', 'run'), 'setup_dependent_run_environment'),
+        'test': (('link', 'run', 'test'), 'setup_dependent_run_environment')
     }
     deptype, method = deptype_and_method[context]

@@ -794,7 +821,7 @@ def modifications_from_dependencies(spec, context):
     return env


-def fork(pkg, function, dirty, fake):
+def fork(pkg, function, dirty, fake, context='build', **kwargs):
     """Fork a child process to do part of a spack build.

     Args:
@@ -806,6 +833,8 @@ def fork(pkg, function, dirty, fake):
         dirty (bool): If True, do NOT clean the environment before
             building.
         fake (bool): If True, skip package setup b/c it's not a real build
+        context (string): If 'build', setup build environment. If 'test', setup
+            test environment.

     Usage::
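The same context flag threads through fork(), so a test phase can run in a child process with the test environment applied before the callback executes. A hedged sketch (pkg and the body of run_tests are placeholders):

    import spack.build_environment as build_environment

    def run_phase_in_child(pkg):
        def run_tests():
            # executed in the forked child, after
            # setup_package(pkg, dirty=False, context='test')
            return 0

        return build_environment.fork(pkg, run_tests, dirty=False,
                                      fake=False, context='test')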
@@ -834,7 +863,7 @@ def child_process(child_pipe, input_stream):

         try:
             if not fake:
-                setup_package(pkg, dirty=dirty)
+                setup_package(pkg, dirty=dirty, context=context)
             return_value = function()
             child_pipe.send(return_value)

@@ -852,19 +881,29 @@ def child_process(child_pipe, input_stream):

             # build up some context from the offending package so we can
             # show that, too.
-            package_context = get_package_context(tb)
+            if exc_type is not spack.install_test.TestFailure:
+                package_context = get_package_context(traceback.extract_tb(tb))
+            else:
+                package_context = []

             build_log = None
-            if hasattr(pkg, 'log_path'):
+            if context == 'build' and hasattr(pkg, 'log_path'):
                 build_log = pkg.log_path

+            test_log = None
+            if context == 'test':
+                test_log = os.path.join(
+                    pkg.test_suite.stage,
+                    spack.install_test.TestSuite.test_log_name(pkg.spec))
+
             # make a pickleable exception to send to parent.
             msg = "%s: %s" % (exc_type.__name__, str(exc))

             ce = ChildError(msg,
                             exc_type.__module__,
                             exc_type.__name__,
-                            tb_string, build_log, package_context)
+                            tb_string, package_context,
+                            build_log, test_log)
             child_pipe.send(ce)

         finally:
@@ -877,7 +916,7 @@ def child_process(child_pipe, input_stream):
     if sys.stdin.isatty() and hasattr(sys.stdin, 'fileno'):
         input_stream = os.fdopen(os.dup(sys.stdin.fileno()))

-    p = multiprocessing.Process(
+    p = fork_context.Process(
         target=child_process, args=(child_pipe, input_stream))
     p.start()

@@ -917,8 +956,8 @@ def get_package_context(traceback, context=3):
     """Return some context for an error message when the build fails.

     Args:
-        traceback (traceback): A traceback from some exception raised during
-            install
+        traceback (list of tuples): output from traceback.extract_tb() or
+            traceback.extract_stack()
         context (int): Lines of context to show before and after the line
             where the error happened

@@ -927,51 +966,44 @@ def get_package_context(traceback, context=3):
     from there.

     """
-    def make_stack(tb, stack=None):
-        """Tracebacks come out of the system in caller -> callee order. Return
-        an array in callee -> caller order so we can traverse it."""
-        if stack is None:
-            stack = []
-        if tb is not None:
-            make_stack(tb.tb_next, stack)
-            stack.append(tb)
-        return stack
-
-    stack = make_stack(traceback)
-
-    for tb in stack:
-        frame = tb.tb_frame
-        if 'self' in frame.f_locals:
-            # Find the first proper subclass of PackageBase.
-            obj = frame.f_locals['self']
-            if isinstance(obj, spack.package.PackageBase):
+    for filename, lineno, function, text in reversed(traceback):
+        if 'package.py' in filename or 'spack/build_systems' in filename:
+            if function not in ('run_test', '_run_test_helper'):
+                # We are in a package and not one of the listed methods
+                # We exclude these methods because we expect errors in them to
+                # be the result of user tests failing, and we show the tests
+                # instead.
                 break

+    # Package files have a line added at import time, so we adjust the lineno
+    # when we are getting context from a package file instead of a base class
+    adjust = 1 if spack.paths.is_package_file(filename) else 0
+    lineno = lineno - adjust

     # We found obj, the Package implementation we care about.
     # Point out the location in the install method where we failed.
     lines = [
         '{0}:{1:d}, in {2}:'.format(
-            inspect.getfile(frame.f_code),
-            frame.f_lineno - 1,  # subtract 1 because f_lineno is 0-indexed
-            frame.f_code.co_name
+            filename,
+            lineno,
+            function
         )
     ]

     # Build a message showing context in the install method.
-    sourcelines, start = inspect.getsourcelines(frame)
-
-    # Calculate lineno of the error relative to the start of the function.
-    # Subtract 1 because f_lineno is 0-indexed.
-    fun_lineno = frame.f_lineno - start - 1
-    start_ctx = max(0, fun_lineno - context)
-    sourcelines = sourcelines[start_ctx:fun_lineno + context + 1]
+    # Adjust for import mangling of package files.
+    with open(filename, 'r') as f:
+        sourcelines = f.readlines()
+    start = max(0, lineno - context - 1)
+    sourcelines = sourcelines[start:lineno + context + 1]

     for i, line in enumerate(sourcelines):
-        is_error = start_ctx + i == fun_lineno
+        i = i + adjust  # adjusting for import munging again
+        is_error = start + i == lineno
         mark = '>> ' if is_error else '   '
-        # Add start to get lineno relative to start of file, not function.
         marked = '  {0}{1:-6d}{2}'.format(
-            mark, start + start_ctx + i, line.rstrip())
+            mark, start + i, line.rstrip())
         if is_error:
             marked = colorize('@R{%s}' % cescape(marked))
         lines.append(marked)
@@ -1025,14 +1057,15 @@ class ChildError(InstallError):
     # context instead of Python context.
     build_errors = [('spack.util.executable', 'ProcessError')]

-    def __init__(self, msg, module, classname, traceback_string, build_log,
-                 context):
+    def __init__(self, msg, module, classname, traceback_string, context,
+                 build_log, test_log):
         super(ChildError, self).__init__(msg)
         self.module = module
         self.name = classname
         self.traceback = traceback_string
-        self.build_log = build_log
         self.context = context
+        self.build_log = build_log
+        self.test_log = test_log

     @property
     def long_message(self):
@@ -1041,21 +1074,12 @@ def long_message(self):

         if (self.module, self.name) in ChildError.build_errors:
             # The error happened in some external executed process. Show
-            # the build log with errors or warnings highlighted.
+            # the log with errors or warnings highlighted.
             if self.build_log and os.path.exists(self.build_log):
-                errors, warnings = parse_log_events(self.build_log)
-                nerr = len(errors)
-                nwar = len(warnings)
-                if nerr > 0:
-                    # If errors are found, only display errors
-                    out.write(
-                        "\n%s found in build log:\n" % plural(nerr, 'error'))
-                    out.write(make_log_context(errors))
-                elif nwar > 0:
-                    # If no errors are found but warnings are, display warnings
-                    out.write(
-                        "\n%s found in build log:\n" % plural(nwar, 'warning'))
-                    out.write(make_log_context(warnings))
+                write_log_summary(out, 'build', self.build_log)
+
+            if self.test_log and os.path.exists(self.test_log):
+                write_log_summary(out, 'test', self.test_log)

         else:
             # The error happened in in the Python code, so try to show
@@ -1072,6 +1096,10 @@ def long_message(self):
             out.write('See build log for details:\n')
             out.write('  %s\n' % self.build_log)

+        if self.test_log and os.path.exists(self.test_log):
+            out.write('See test log for details:\n')
+            out.write('  %s\n' % self.test_log)
+
         return out.getvalue()

     def __str__(self):
@@ -1088,13 +1116,16 @@ def __reduce__(self):
             self.module,
             self.name,
             self.traceback,
+            self.context,
             self.build_log,
-            self.context)
+            self.test_log)


-def _make_child_error(msg, module, name, traceback, build_log, context):
+def _make_child_error(msg, module, name, traceback, context,
+                      build_log, test_log):
    """Used by __reduce__ in ChildError to reconstruct pickled errors."""
-    return ChildError(msg, module, name, traceback, build_log, context)
+    return ChildError(msg, module, name, traceback, context,
+                      build_log, test_log)


 class StopPhase(spack.error.SpackError):
@@ -1105,3 +1136,30 @@ def __reduce__(self):

 def _make_stop_phase(msg, long_msg):
     return StopPhase(msg, long_msg)


+def write_log_summary(out, log_type, log, last=None):
+    errors, warnings = parse_log_events(log)
+    nerr = len(errors)
+    nwar = len(warnings)
+
+    if nerr > 0:
+        if last and nerr > last:
+            errors = errors[-last:]
+            nerr = last
+
+        # If errors are found, only display errors
+        out.write(
+            "\n%s found in %s log:\n" %
+            (plural(nerr, 'error'), log_type))
+        out.write(make_log_context(errors))
+    elif nwar > 0:
+        if last and nwar > last:
+            warnings = warnings[-last:]
+            nwar = last
+
+        # If no errors are found but warnings are, display warnings
+        out.write(
+            "\n%s found in %s log:\n" %
+            (plural(nwar, 'warning'), log_type))
+        out.write(make_log_context(warnings))
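write_log_summary factors the error/warning excerpting out of ChildError.long_message so build logs and test logs share it. A usage sketch (the log path is illustrative):

    import sys
    from spack.build_environment import write_log_summary

    # show only the last two errors (or warnings) found in a test log
    write_log_summary(sys.stdout, 'test', '/tmp/test-suite.log', last=2)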
lib/spack/spack/build_systems/cmake.py
@@ -308,13 +308,21 @@ def flags_to_build_system_args(self, flags):
                 self.cmake_flag_args.append(libs_string.format(lang,
                                                                libs_flags))

+    @property
+    def build_dirname(self):
+        """Returns the directory name to use when building the package
+
+        :return: name of the subdirectory for building the package
+        """
+        return 'spack-build-%s' % self.spec.dag_hash(7)
+
     @property
     def build_directory(self):
         """Returns the directory to use when building the package

         :return: directory where to build the package
         """
-        return os.path.join(self.stage.path, 'spack-build')
+        return os.path.join(self.stage.path, self.build_dirname)

     def cmake_args(self):
         """Produces a list containing all the arguments that must be passed to
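Hashing the spec into the build directory name means different concretizations of the same package no longer collide in a shared stage. The computation in isolation:

    def build_dirname_for(spec):
        # spec: a concretized spack.spec.Spec (assumed); dag_hash(7) is the
        # first seven characters of the spec's DAG hash
        return 'spack-build-%s' % spec.dag_hash(7)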
lib/spack/spack/build_systems/cuda.py
@@ -12,8 +12,9 @@
 class CudaPackage(PackageBase):
     """Auxiliary class which contains CUDA variant, dependencies and conflicts
     and is meant to unify and facilitate its usage.

-    Maintainers: ax3l, svenevs
     """
+    maintainers = ['ax3l', 'svenevs']
+
     # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
     # https://developer.nvidia.com/cuda-gpus
@@ -25,6 +26,7 @@ class CudaPackage(PackageBase):
         '50', '52', '53',
         '60', '61', '62',
         '70', '72', '75',
+        '80',
     ]

     # FIXME: keep cuda and cuda_arch separate to make usage easier until
@@ -48,6 +50,7 @@ def cuda_flags(arch_list):

     # CUDA version vs Architecture
     # https://en.wikipedia.org/wiki/CUDA#GPUs_supported
+    # https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features
     depends_on('cuda@:6.0', when='cuda_arch=10')
     depends_on('cuda@:6.5', when='cuda_arch=11')
     depends_on('cuda@2.1:6.5', when='cuda_arch=12')
@@ -58,8 +61,8 @@ def cuda_flags(arch_list):

     depends_on('cuda@5.0:10.2', when='cuda_arch=30')
     depends_on('cuda@5.0:10.2', when='cuda_arch=32')
-    depends_on('cuda@5.0:10.2', when='cuda_arch=35')
-    depends_on('cuda@6.5:10.2', when='cuda_arch=37')
+    depends_on('cuda@5.0:', when='cuda_arch=35')
+    depends_on('cuda@6.5:', when='cuda_arch=37')

     depends_on('cuda@6.0:', when='cuda_arch=50')
     depends_on('cuda@6.5:', when='cuda_arch=52')
@@ -73,6 +76,8 @@ def cuda_flags(arch_list):
     depends_on('cuda@9.0:', when='cuda_arch=72')
     depends_on('cuda@10.0:', when='cuda_arch=75')

+    depends_on('cuda@11.0:', when='cuda_arch=80')
+
     # There are at least three cases to be aware of for compiler conflicts
     # 1. Linux x86_64
     # 2. Linux ppc64le
@@ -88,12 +93,15 @@ def cuda_flags(arch_list):
     conflicts('%gcc@7:', when='+cuda ^cuda@:9.1' + arch_platform)
     conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130' + arch_platform)
     conflicts('%gcc@9:', when='+cuda ^cuda@:10.2.89' + arch_platform)
+    conflicts('%gcc@:4,10:', when='+cuda ^cuda@:11.0.2' + arch_platform)
     conflicts('%pgi@:14.8', when='+cuda ^cuda@:7.0.27' + arch_platform)
     conflicts('%pgi@:15.3,15.5:', when='+cuda ^cuda@7.5' + arch_platform)
     conflicts('%pgi@:16.2,16.0:16.3', when='+cuda ^cuda@8' + arch_platform)
     conflicts('%pgi@:15,18:', when='+cuda ^cuda@9.0:9.1' + arch_platform)
-    conflicts('%pgi@:16', when='+cuda ^cuda@9.2.88:10' + arch_platform)
-    conflicts('%pgi@:17', when='+cuda ^cuda@10.2.89' + arch_platform)
+    conflicts('%pgi@:16,19:', when='+cuda ^cuda@9.2.88:10' + arch_platform)
+    conflicts('%pgi@:17,20:',
+              when='+cuda ^cuda@10.1.105:10.2.89' + arch_platform)
+    conflicts('%pgi@:17,20.2:', when='+cuda ^cuda@11.0.2' + arch_platform)
     conflicts('%clang@:3.4', when='+cuda ^cuda@:7.5' + arch_platform)
     conflicts('%clang@:3.7,4:',
               when='+cuda ^cuda@8.0:9.0' + arch_platform)
@@ -104,7 +112,8 @@ def cuda_flags(arch_list):
     conflicts('%clang@:3.7,7.1:', when='+cuda ^cuda@10.1.105' + arch_platform)
     conflicts('%clang@:3.7,8.1:',
               when='+cuda ^cuda@10.1.105:10.1.243' + arch_platform)
-    conflicts('%clang@:3.2,9.0:', when='+cuda ^cuda@10.2.89' + arch_platform)
+    conflicts('%clang@:3.2,9:', when='+cuda ^cuda@10.2.89' + arch_platform)
+    conflicts('%clang@:5,10:', when='+cuda ^cuda@11.0.2' + arch_platform)

     # x86_64 vs. ppc64le differ according to NVidia docs
     # Linux ppc64le compiler conflicts from Table from the docs below:
@@ -119,6 +128,8 @@ def cuda_flags(arch_list):
     conflicts('%gcc@6:', when='+cuda ^cuda@:9' + arch_platform)
     conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130' + arch_platform)
     conflicts('%gcc@9:', when='+cuda ^cuda@:10.1.243' + arch_platform)
+    # officially, CUDA 11.0.2 only supports the system GCC 8.3 on ppc64le
+    conflicts('%gcc@:4,10:', when='+cuda ^cuda@:11.0.2' + arch_platform)
     conflicts('%pgi', when='+cuda ^cuda@:8' + arch_platform)
     conflicts('%pgi@:16', when='+cuda ^cuda@:9.1.185' + arch_platform)
     conflicts('%pgi@:17', when='+cuda ^cuda@:10' + arch_platform)
@@ -128,6 +139,7 @@ def cuda_flags(arch_list):
     conflicts('%clang@7:', when='+cuda ^cuda@10.0.130' + arch_platform)
     conflicts('%clang@7.1:', when='+cuda ^cuda@:10.1.105' + arch_platform)
     conflicts('%clang@8.1:', when='+cuda ^cuda@:10.2.89' + arch_platform)
+    conflicts('%clang@:5,10.0:', when='+cuda ^cuda@11.0.2' + arch_platform)

     # Intel is mostly relevant for x86_64 Linux, even though it also
     # exists for Mac OS X. No information prior to CUDA 3.2 or Intel 11.1
@@ -141,11 +153,13 @@ def cuda_flags(arch_list):
     conflicts('%intel@17.0:', when='+cuda ^cuda@:8.0.60')
     conflicts('%intel@18.0:', when='+cuda ^cuda@:9.9')
     conflicts('%intel@19.0:', when='+cuda ^cuda@:10.0')
+    conflicts('%intel@19.1:', when='+cuda ^cuda@:10.1')
+    conflicts('%intel@19.2:', when='+cuda ^cuda@:11.0.2')

     # XL is mostly relevant for ppc64le Linux
     conflicts('%xl@:12,14:', when='+cuda ^cuda@:9.1')
     conflicts('%xl@:12,14:15,17:', when='+cuda ^cuda@9.2')
-    conflicts('%xl@17:', when='+cuda ^cuda@:10.2.89')
+    conflicts('%xl@:12,17:', when='+cuda ^cuda@:11.0.2')

     # Mac OS X
     # platform = ' platform=darwin'
@@ -156,7 +170,7 @@ def cuda_flags(arch_list):
     # `clang-apple@x.y.z as a possible fix.
     # Compiler conflicts will be eventual taken from here:
     # https://docs.nvidia.com/cuda/cuda-installation-guide-mac-os-x/index.html#abstract
-    conflicts('platform=darwin', when='+cuda ^cuda@11.0:')
+    conflicts('platform=darwin', when='+cuda ^cuda@11.0.2:')

     # Make sure cuda_arch can not be used without +cuda
     for value in cuda_arch_values:
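A package opts into all of these constraints just by mixing CudaPackage into its class; the variant, dependencies, and conflicts above come along for free. A hypothetical example (class name and the CMake flag wiring are illustrative only):

    from spack import *  # standard package.py preamble


    class MyGpuSolver(CMakePackage, CudaPackage):
        """Hypothetical CUDA-enabled package."""

        def cmake_args(self):
            args = []
            if '+cuda' in self.spec:
                arch = self.spec.variants['cuda_arch'].value
                # cuda_flags() turns cuda_arch values into nvcc arch flags
                args.append('-DCMAKE_CUDA_FLAGS=' +
                            ' '.join(CudaPackage.cuda_flags(arch)))
            return args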
lib/spack/spack/build_systems/intel.py
@@ -1017,6 +1017,15 @@ def setup_run_environment(self, env):

             env.extend(EnvironmentModifications.from_sourcing_file(f, *args))

+        if self.spec.name in ('intel', 'intel-parallel-studio'):
+            # this package provides compilers
+            # TODO: fix check above when compilers are dependencies
+            env.set('CC', self.prefix.bin.icc)
+            env.set('CXX', self.prefix.bin.icpc)
+            env.set('FC', self.prefix.bin.ifort)
+            env.set('F77', self.prefix.bin.ifort)
+            env.set('F90', self.prefix.bin.ifort)
+
     def setup_dependent_build_environment(self, env, dependent_spec):
         # NB: This function is overwritten by 'mpi' provider packages:
         #
lib/spack/spack/build_systems/maven.py (new file, 55 lines)
@@ -0,0 +1,55 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from llnl.util.filesystem import install_tree, working_dir
+from spack.directives import depends_on
+from spack.package import PackageBase, run_after
+from spack.util.executable import which
+
+
+class MavenPackage(PackageBase):
+    """Specialized class for packages that are built using the
+    Maven build system. See https://maven.apache.org/index.html
+    for more information.
+
+    This class provides the following phases that can be overridden:
+
+    * build
+    * install
+    """
+    # Default phases
+    phases = ['build', 'install']
+
+    # To be used in UI queries that require to know which
+    # build-system class we are using
+    build_system_class = 'MavenPackage'
+
+    depends_on('java', type=('build', 'run'))
+    depends_on('maven', type='build')
+
+    @property
+    def build_directory(self):
+        """The directory containing the ``pom.xml`` file."""
+        return self.stage.source_path
+
+    def build(self, spec, prefix):
+        """Compile code and package into a JAR file."""
+
+        with working_dir(self.build_directory):
+            mvn = which('mvn')
+            if self.run_tests:
+                mvn('verify')
+            else:
+                mvn('package', '-DskipTests')
+
+    def install(self, spec, prefix):
+        """Copy to installation prefix."""
+
+        with working_dir(self.build_directory):
+            install_tree('.', prefix)
+
+    # Check that self.prefix is there after installation
+    run_after('install')(PackageBase.sanity_check_prefix)
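A hypothetical package.py using the new base class; only names, versions, and URLs differ from package to package, while the build and install phases above are inherited:

    from spack import *  # standard package.py preamble


    class MyMavenTool(MavenPackage):
        """Hypothetical package built with Maven."""

        homepage = "https://example.com/my-maven-tool"
        url = "https://example.com/my-maven-tool-1.0.0.tar.gz"

        version('1.0.0', sha256='...')  # placeholder checksum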
lib/spack/spack/build_systems/python.py
@@ -91,7 +91,7 @@ def configure(self, spec, prefix):
     build_system_class = 'PythonPackage'

     #: Callback names for build-time test
-    build_time_test_callbacks = ['test']
+    build_time_test_callbacks = ['build_test']

     #: Callback names for install-time test
     install_time_test_callbacks = ['import_module_test']
@@ -192,6 +192,10 @@ def build_scripts(self, spec, prefix):

         self.setup_py('build_scripts', *args)

+    def build_scripts_args(self, spec, prefix):
+        """Arguments to pass to build_scripts."""
+        return []
+
     def clean(self, spec, prefix):
         """Clean up temporary files from 'build' command."""
         args = self.clean_args(spec, prefix)
@@ -357,7 +361,7 @@ def check_args(self, spec, prefix):

     # Testing

-    def test(self):
+    def build_test(self):
         """Run unit tests after in-place build.

         These tests are only run if the package actually has a 'test' command.
lib/spack/spack/build_systems/ruby.py (new file, 59 lines)
@@ -0,0 +1,59 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import glob
+import inspect
+
+from spack.directives import depends_on, extends
+from spack.package import PackageBase, run_after
+
+
+class RubyPackage(PackageBase):
+    """Specialized class for building Ruby gems.
+
+    This class provides two phases that can be overridden if required:
+
+    #. :py:meth:`~.RubyPackage.build`
+    #. :py:meth:`~.RubyPackage.install`
+    """
+    #: Phases of a Ruby package
+    phases = ['build', 'install']
+
+    #: This attribute is used in UI queries that need to know the build
+    #: system base class
+    build_system_class = 'RubyPackage'
+
+    extends('ruby')
+
+    depends_on('ruby', type=('build', 'run'))
+
+    def build(self, spec, prefix):
+        """Build a Ruby gem."""
+
+        # ruby-rake provides both rake.gemspec and Rakefile, but only
+        # rake.gemspec can be built without an existing rake installation
+        gemspecs = glob.glob('*.gemspec')
+        rakefiles = glob.glob('Rakefile')
+        if gemspecs:
+            inspect.getmodule(self).gem('build', '--norc', gemspecs[0])
+        elif rakefiles:
+            jobs = inspect.getmodule(self).make_jobs
+            inspect.getmodule(self).rake('package', '-j{0}'.format(jobs))
+        else:
+            # Some Ruby packages only ship `*.gem` files, so nothing to build
+            pass
+
+    def install(self, spec, prefix):
+        """Install a Ruby gem.
+
+        The ruby package sets ``GEM_HOME`` to tell gem where to install to."""
+
+        gems = glob.glob('*.gem')
+        if gems:
+            inspect.getmodule(self).gem(
+                'install', '--norc', '--ignore-dependencies', gems[0])
+
+    # Check that self.prefix is there after installation
+    run_after('install')(PackageBase.sanity_check_prefix)
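As with MavenPackage, adopting the new class is declarative; the build() and install() phases above handle the gemspec, Rakefile, and bare *.gem layouts. A hypothetical example:

    from spack import *  # standard package.py preamble


    class MyGem(RubyPackage):
        """Hypothetical Ruby gem."""

        homepage = "https://example.com/my-gem"
        url = "https://example.com/my-gem-0.1.0.tar.gz"

        version('0.1.0', sha256='...')  # placeholder checksum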
lib/spack/spack/build_systems/scons.py
@@ -33,7 +33,7 @@ class SConsPackage(PackageBase):
     build_system_class = 'SConsPackage'

     #: Callback names for build-time test
-    build_time_test_callbacks = ['test']
+    build_time_test_callbacks = ['build_test']

     depends_on('scons', type='build')

@@ -59,7 +59,7 @@ def install(self, spec, prefix):

     # Testing

-    def test(self):
+    def build_test(self):
         """Run unit tests after build.

         By default, does nothing. Override this if you want to

lib/spack/spack/build_systems/waf.py
@@ -47,10 +47,10 @@ class WafPackage(PackageBase):
     build_system_class = 'WafPackage'

     # Callback names for build-time test
-    build_time_test_callbacks = ['test']
+    build_time_test_callbacks = ['build_test']

     # Callback names for install-time test
-    install_time_test_callbacks = ['installtest']
+    install_time_test_callbacks = ['install_test']

     # Much like AutotoolsPackage does not require automake and autoconf
     # to build, WafPackage does not require waf to build. It only requires
@@ -106,7 +106,7 @@ def install_args(self):

     # Testing

-    def test(self):
+    def build_test(self):
         """Run unit tests after build.

         By default, does nothing. Override this if you want to
@@ -116,7 +116,7 @@ def test(self):

     run_after('build')(PackageBase._run_default_build_time_test_callbacks)

-    def installtest(self):
+    def install_test(self):
         """Run unit tests after install.

         By default, does nothing. Override this if you want to
lib/spack/spack/ci.py
@@ -4,9 +4,11 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import base64
+import copy
 import datetime
 import json
 import os
+import re
 import shutil
 import tempfile
 import zlib
@@ -26,7 +28,7 @@
 import spack.environment as ev
 from spack.error import SpackError
 import spack.hash_types as ht
-from spack.main import SpackCommand
+import spack.main
 import spack.repo
 from spack.spec import Spec
 import spack.util.spack_yaml as syaml
@@ -37,8 +39,8 @@
     'always',
 ]

-spack_gpg = SpackCommand('gpg')
-spack_compiler = SpackCommand('compiler')
+spack_gpg = spack.main.SpackCommand('gpg')
+spack_compiler = spack.main.SpackCommand('compiler')


 class TemporaryDirectory(object):
@@ -421,12 +423,53 @@ def spec_matches(spec, match_string):
     return spec.satisfies(match_string)


-def find_matching_config(spec, ci_mappings):
+def copy_attributes(attrs_list, src_dict, dest_dict):
+    for runner_attr in attrs_list:
+        if runner_attr in src_dict:
+            if runner_attr in dest_dict and runner_attr == 'tags':
+                # For 'tags', we combine the lists of tags, while
+                # avoiding duplicates
+                for tag in src_dict[runner_attr]:
+                    if tag not in dest_dict[runner_attr]:
+                        dest_dict[runner_attr].append(tag)
+            elif runner_attr in dest_dict and runner_attr == 'variables':
+                # For 'variables', we merge the dictionaries.  Any conflicts
+                # (i.e. 'runner-attributes' has same variable key as the
+                # higher level) we resolve by keeping the more specific
+                # 'runner-attributes' version.
+                for src_key, src_val in src_dict[runner_attr].items():
+                    dest_dict[runner_attr][src_key] = copy.deepcopy(
+                        src_dict[runner_attr][src_key])
+            else:
+                dest_dict[runner_attr] = copy.deepcopy(src_dict[runner_attr])
+
+
+def find_matching_config(spec, gitlab_ci):
+    runner_attributes = {}
+    overridable_attrs = [
+        'image',
+        'tags',
+        'variables',
+        'before_script',
+        'script',
+        'after_script',
+    ]
+
+    copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)
+
+    ci_mappings = gitlab_ci['mappings']
     for ci_mapping in ci_mappings:
         for match_string in ci_mapping['match']:
             if spec_matches(spec, match_string):
-                return ci_mapping['runner-attributes']
-    return None
+                if 'runner-attributes' in ci_mapping:
+                    copy_attributes(overridable_attrs,
+                                    ci_mapping['runner-attributes'],
+                                    runner_attributes)
+                return runner_attributes
+    else:
+        return None
+
+    return runner_attributes


 def pkg_name_from_spec_label(spec_label):
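The merge rules in copy_attributes are easiest to see on a tiny example: 'tags' unions lists without duplicates, 'variables' merges dicts with the source winning on conflicts, and anything else is deep-copied over:

    from spack.ci import copy_attributes

    src = {'tags': ['spack', 'large'], 'variables': {'ENV1': 'mapping-level'}}
    dest = {'tags': ['spack'], 'variables': {'ENV1': 'top', 'ENV2': 'kept'}}

    copy_attributes(['tags', 'variables'], src, dest)
    # dest['tags']      -> ['spack', 'large']
    # dest['variables'] -> {'ENV1': 'mapping-level', 'ENV2': 'kept'}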
@@ -449,7 +492,6 @@ def format_job_needs(phase_name, strip_compilers, dep_jobs,
|
||||
|
||||
|
||||
def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
custom_spack_repo=None, custom_spack_ref=None,
|
||||
run_optimizer=False, use_dependencies=False):
|
||||
# FIXME: What's the difference between one that opens with 'spack'
|
||||
# and one that opens with 'env'? This will only handle the former.
|
||||
@@ -462,7 +504,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
tty.die('Environment yaml does not have "gitlab-ci" section')
|
||||
|
||||
gitlab_ci = yaml_root['gitlab-ci']
|
||||
ci_mappings = gitlab_ci['mappings']
|
||||
|
||||
final_job_config = None
|
||||
if 'final-stage-rebuild-index' in gitlab_ci:
|
||||
@@ -488,22 +529,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
os.environ.get('SPACK_IS_PR_PIPELINE', '').lower() == 'true'
|
||||
)
|
||||
|
||||
# Make sure we use a custom spack if necessary
|
||||
before_script = None
|
||||
after_script = None
|
||||
if custom_spack_repo:
|
||||
if not custom_spack_ref:
|
||||
custom_spack_ref = 'master'
|
||||
before_script = [
|
||||
('git clone "{0}"'.format(custom_spack_repo)),
|
||||
'pushd ./spack && git checkout "{0}" && popd'.format(
|
||||
custom_spack_ref),
|
||||
'. "./spack/share/spack/setup-env.sh"',
|
||||
]
|
||||
after_script = [
|
||||
'rm -rf "./spack"'
|
||||
]
|
||||
|
||||
ci_mirrors = yaml_root['mirrors']
|
||||
mirror_urls = [url for url in ci_mirrors.values()]
|
||||
|
||||
@@ -580,7 +605,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
release_spec = root_spec[pkg_name]
|
||||
|
||||
runner_attribs = find_matching_config(
|
||||
release_spec, ci_mappings)
|
||||
release_spec, gitlab_ci)
|
||||
|
||||
if not runner_attribs:
|
||||
tty.warn('No match found for {0}, skipping it'.format(
|
||||
@@ -604,19 +629,27 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
except AttributeError:
|
||||
image_name = build_image
|
||||
|
||||
job_script = [
|
||||
'spack env activate --without-view .',
|
||||
'spack ci rebuild',
|
||||
]
|
||||
if 'script' in runner_attribs:
|
||||
job_script = [s for s in runner_attribs['script']]
|
||||
|
||||
before_script = None
|
||||
if 'before_script' in runner_attribs:
|
||||
before_script = [
|
||||
s for s in runner_attribs['before_script']
|
||||
]
|
||||
|
||||
after_script = None
|
||||
if 'after_script' in runner_attribs:
|
||||
after_script = [s for s in runner_attribs['after_script']]
|
||||
|
||||
osname = str(release_spec.architecture)
|
||||
job_name = get_job_name(phase_name, strip_compilers,
|
||||
release_spec, osname, build_group)
|
||||
|
||||
debug_flag = ''
|
||||
if 'enable-debug-messages' in gitlab_ci:
|
||||
debug_flag = '-d '
|
||||
|
||||
job_scripts = [
|
||||
'spack env activate .',
|
||||
'spack {0}ci rebuild'.format(debug_flag),
|
||||
]

                compiler_action = 'NONE'
                if len(phases) > 1:
                    compiler_action = 'FIND_ANY'

@@ -717,7 +750,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                job_object = {
                    'stage': stage_name,
                    'variables': variables,
                    'script': job_scripts,
                    'script': job_script,
                    'tags': tags,
                    'artifacts': {
                        'paths': artifact_paths,
@@ -788,6 +821,26 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,

    output_object['stages'] = stage_names

    # Capture the version of spack used to generate the pipeline, transform it
    # into a value that can be passed to "git checkout", and save it in a
    # global yaml variable
    spack_version = spack.main.get_version()
    version_to_clone = None
    v_match = re.match(r"^\d+\.\d+\.\d+$", spack_version)
    if v_match:
        version_to_clone = 'v{0}'.format(v_match.group(0))
    else:
        v_match = re.match(r"^[^-]+-[^-]+-([a-f\d]+)$", spack_version)
        if v_match:
            version_to_clone = v_match.group(1)
        else:
            version_to_clone = spack_version

    output_object['variables'] = {
        'SPACK_VERSION': spack_version,
        'SPACK_CHECKOUT_VERSION': version_to_clone,
    }
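
The two regex branches above, exercised in isolation (the version strings
are invented examples, not output captured from Spack):

    import re

    def checkout_ref(spack_version):
        m = re.match(r"^\d+\.\d+\.\d+$", spack_version)
        if m:
            return 'v{0}'.format(m.group(0))   # release: 0.15.1 -> v0.15.1
        m = re.match(r"^[^-]+-[^-]+-([a-f\d]+)$", spack_version)
        if m:
            return m.group(1)                  # dev build -> commit sha suffix
        return spack_version                   # fall back to the raw string

    assert checkout_ref('0.15.1') == 'v0.15.1'
    assert checkout_ref('0.15.1-271-afe1fd89b9') == 'afe1fd89b9'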

    sorted_output = {}
    for output_key, output_value in sorted(output_object.items()):
        sorted_output[output_key] = output_value

@@ -1043,17 +1096,10 @@ def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
        tty.debug('job package: {0}'.format(job_pkg))
        stage_dir = job_pkg.stage.path
        tty.debug('stage dir: {0}'.format(stage_dir))
        build_env_src = os.path.join(stage_dir, 'spack-build-env.txt')
        build_out_src = os.path.join(stage_dir, 'spack-build-out.txt')
        build_env_dst = os.path.join(
            job_log_dir, 'spack-build-env.txt')
        build_out_dst = os.path.join(
            job_log_dir, 'spack-build-out.txt')
        tty.debug('Copying logs to artifacts:')
        tty.debug('  1: {0} -> {1}'.format(
            build_env_src, build_env_dst))
        shutil.copyfile(build_env_src, build_env_dst)
        tty.debug('  2: {0} -> {1}'.format(
        tty.debug('Copying build log ({0}) to artifacts ({1})'.format(
            build_out_src, build_out_dst))
        shutil.copyfile(build_out_src, build_out_dst)
    except Exception as inst:

@@ -2,86 +2,15 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import argparse
import os

import llnl.util.tty as tty
import spack.build_environment as build_environment
import spack.cmd
import spack.cmd.common.arguments as arguments
from spack.util.environment import dump_environment, pickle_environment
import spack.cmd.common.env_utility as env_utility

description = "run a command in a spec's install environment, " \
    "or dump its environment to screen or file"
section = "build"
level = "long"


def setup_parser(subparser):
    arguments.add_common_arguments(subparser, ['clean', 'dirty'])
    subparser.add_argument(
        '--dump', metavar="FILE",
        help="dump a source-able environment to FILE"
    )
    subparser.add_argument(
        '--pickle', metavar="FILE",
        help="dump a pickled source-able environment to FILE"
    )
    subparser.add_argument(
        'spec', nargs=argparse.REMAINDER,
        metavar='spec [--] [cmd]...',
        help="spec of package environment to emulate")
    subparser.epilog\
        = 'If a command is not specified, the environment will be printed ' \
        'to standard output (cf /usr/bin/env) unless --dump and/or --pickle ' \
        'are specified.\n\nIf a command is specified and spec is ' \
        'multi-word, then the -- separator is obligatory.'
setup_parser = env_utility.setup_parser


def build_env(parser, args):
    if not args.spec:
        tty.die("spack build-env requires a spec.")

    # Specs may have spaces in them, so if they do, require that the
    # caller put a '--' between the spec and the command to be
    # executed. If there is no '--', assume that the spec is the
    # first argument.
    sep = '--'
    if sep in args.spec:
        s = args.spec.index(sep)
        spec = args.spec[:s]
        cmd = args.spec[s + 1:]
    else:
        spec = args.spec[0]
        cmd = args.spec[1:]

    specs = spack.cmd.parse_specs(spec, concretize=True)
    if len(specs) > 1:
        tty.die("spack build-env only takes one spec.")
    spec = specs[0]

    build_environment.setup_package(spec.package, args.dirty)

    if args.dump:
        # Dump a source-able environment to a text file.
        tty.msg("Dumping a source-able environment to {0}".format(args.dump))
        dump_environment(args.dump)

    if args.pickle:
        # Dump a source-able environment to a pickle file.
        tty.msg(
            "Pickling a source-able environment to {0}".format(args.pickle))
        pickle_environment(args.pickle)

    if cmd:
        # Execute the command with the new environment
        os.execvp(cmd[0], cmd)

    elif not bool(args.pickle or args.dump):
        # If no command or dump/pickle option act like the "env" command
        # and print out env vars.
        for key, val in os.environ.items():
            print("%s=%s" % (key, val))
    env_utility.emulate_env_utility('build-env', 'build', args)
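
The '--' separator convention documented in the comment above, as a
self-contained sketch (the argv values are made up for illustration):

    argv = ['zlib', '%gcc@9', '--', 'make', '-j4']
    sep = '--'
    if sep in argv:
        s = argv.index(sep)
        spec_words, cmd = argv[:s], argv[s + 1:]
    else:
        spec_words, cmd = argv[:1], argv[1:]
    assert spec_words == ['zlib', '%gcc@9'] and cmd == ['make', '-j4']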

@@ -8,6 +8,7 @@
import sys

import llnl.util.tty as tty
import spack.architecture
import spack.binary_distribution as bindist
import spack.cmd
import spack.cmd.common.arguments as arguments
@@ -25,6 +26,7 @@

from spack.error import SpecError
from spack.spec import Spec, save_dependency_spec_yamls
from spack.util.string import plural

from spack.cmd import display_specs

@@ -237,8 +239,9 @@ def find_matching_specs(pkgs, allow_multiple_matches=False, env=None):
    concretized specs given from cli

    Args:
        specs: list of specs to be matched against installed packages
        allow_multiple_matches : if True multiple matches are admitted
        pkgs (string): spec to be matched against installed packages
        allow_multiple_matches (bool): if True multiple matches are admitted
        env (Environment): active environment, or ``None`` if there is not one

    Return:
        list of specs
@@ -288,8 +291,12 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False,
    # List of specs that match expressions given via command line
    specs_from_cli = []
    has_errors = False
    allarch = other_arch
    specs = bindist.get_specs(allarch)

    specs = bindist.get_specs()
    if not other_arch:
        arch = spack.architecture.default_arch().to_spec()
        specs = [s for s in specs if s.satisfies(arch)]

    for pkg in pkgs:
        matches = []
        tty.msg("buildcache spec(s) matching %s \n" % pkg)
@@ -326,26 +333,25 @@ def _createtarball(env, spec_yaml=None, packages=None, add_spec=True,
                   signing_key=None, force=False, make_relative=False,
                   unsigned=False, allow_root=False, rebuild_index=False):
    if spec_yaml:
        packages = set()
        with open(spec_yaml, 'r') as fd:
            yaml_text = fd.read()
            tty.debug('createtarball read spec yaml:')
            tty.debug(yaml_text)
            s = Spec.from_yaml(yaml_text)
            packages.add('/{0}'.format(s.dag_hash()))
            package = '/{0}'.format(s.dag_hash())
            matches = find_matching_specs(package, env=env)

    elif packages:
        packages = packages
        matches = find_matching_specs(packages, env=env)

    elif env:
        packages = env.concretized_user_specs
        matches = [env.specs_by_hash[h] for h in env.concretized_order]

    else:
        tty.die("build cache file creation requires at least one" +
                " installed package spec, an activate environment," +
                " installed package spec, an active environment," +
                " or else a path to a yaml file containing a spec" +
                " to install")
    pkgs = set(packages)
    specs = set()

    mirror = spack.mirror.MirrorCollection().lookup(output_location)
@@ -354,8 +360,6 @@ def _createtarball(env, spec_yaml=None, packages=None, add_spec=True,
    msg = 'Buildcache files will be output to %s/build_cache' % outdir
    tty.msg(msg)

    matches = find_matching_specs(pkgs, env=env)

    if matches:
        tty.debug('Found at least one matching spec')

@@ -365,11 +369,16 @@ def _createtarball(env, spec_yaml=None, packages=None, add_spec=True,
            tty.debug('skipping external or virtual spec %s' %
                      match.format())
        else:
            if add_spec:
            lookup = spack.store.db.query_one(match)

            if not add_spec:
                tty.debug('skipping matching root spec %s' % match.format())
            elif lookup is None:
                tty.debug('skipping uninstalled matching spec %s' %
                          match.format())
            else:
                tty.debug('adding matching spec %s' % match.format())
                specs.add(match)
            else:
                tty.debug('skipping matching spec %s' % match.format())

            if not add_deps:
                continue
@@ -382,9 +391,14 @@ def _createtarball(env, spec_yaml=None, packages=None, add_spec=True,
                if d == 0:
                    continue

                lookup = spack.store.db.query_one(node)

                if node.external or node.virtual:
                    tty.debug('skipping external or virtual dependency %s' %
                              node.format())
                elif lookup is None:
                    tty.debug('skipping uninstalled dependency %s' %
                              node.format())
                else:
                    tty.debug('adding dependency %s' % node.format())
                    specs.add(node)
@@ -393,9 +407,12 @@ def _createtarball(env, spec_yaml=None, packages=None, add_spec=True,

    for spec in specs:
        tty.debug('creating binary cache file for package %s ' % spec.format())
        bindist.build_tarball(spec, outdir, force, make_relative,
                              unsigned, allow_root, signing_key,
                              rebuild_index)
        try:
            bindist.build_tarball(spec, outdir, force, make_relative,
                                  unsigned, allow_root, signing_key,
                                  rebuild_index)
        except bindist.NoOverwriteException as e:
            tty.warn(e)
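
The warn-and-continue pattern introduced above, in isolation (the exception
class and create_fn below are stand-ins, not Spack APIs):

    class NoOverwriteError(Exception):
        pass

    def create_fn(name, existing):
        if name in existing:
            raise NoOverwriteError('%s already exists' % name)
        existing.add(name)

    existing = {'zlib'}
    for name in ['zlib', 'bzip2']:
        try:
            create_fn(name, existing)
        except NoOverwriteError as e:
            print('Warning:', e)   # one failure no longer aborts the loop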


def createtarball(args):
@@ -488,10 +505,20 @@ def install_tarball(spec, args):


def listspecs(args):
    """list binary packages available from mirrors"""
    specs = bindist.get_specs(args.allarch)
    specs = bindist.get_specs()
    if not args.allarch:
        arch = spack.architecture.default_arch().to_spec()
        specs = [s for s in specs if s.satisfies(arch)]

    if args.specs:
        constraints = set(args.specs)
        specs = [s for s in specs if any(s.satisfies(c) for c in constraints)]
    if sys.stdout.isatty():
        builds = len(specs)
        tty.msg("%s." % plural(builds, 'cached build'))
        if not builds and not args.allarch:
            tty.msg("You can query all available architectures with:",
                    "spack buildcache list --allarch")
    display_specs(specs, args, all_headers=True)
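
The architecture filtering now happens client-side rather than inside
get_specs. A toy version under stated assumptions (the spec strings and
the satisfies stand-in are invented):

    def satisfies(spec, arch):
        return arch in spec   # stand-in for Spec.satisfies

    specs = ['zlib arch=linux-ubuntu18.04-x86_64',
             'zlib arch=darwin-mojave-x86_64']
    default_arch = 'linux-ubuntu18.04-x86_64'
    local = [s for s in specs if satisfies(s, default_arch)]
    assert len(local) == 1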


@@ -65,7 +65,7 @@ def checksum(parser, args):

    version_lines = spack.stage.get_checksums_for_versions(
        url_dict, pkg.name, keep_stage=args.keep_stage,
        batch=(args.batch or len(args.versions) > 0),
        batch=(args.batch or len(args.versions) > 0 or len(url_dict) == 1),
        fetch_options=pkg.fetch_options)

    print()
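
The batch decision above as a pure function (names invented for clarity):

    def run_in_batch(batch_flag, n_requested_versions, n_urls):
        # New behavior: a single discovered URL also skips the prompt.
        return batch_flag or n_requested_versions > 0 or n_urls == 1

    assert run_in_batch(False, 0, 1)       # one version found -> no prompt
    assert not run_in_batch(False, 0, 3)   # several versions -> still ask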

@@ -45,15 +45,6 @@ def setup_parser(subparser):
        '--copy-to', default=None,
        help="Absolute path of additional location where generated jobs " +
             "yaml file should be copied. Default is not to copy.")
    generate.add_argument(
        '--spack-repo', default=None,
        help="Provide a url for this argument if a custom spack repo " +
             "should be cloned as a step in each generated job.")
    generate.add_argument(
        '--spack-ref', default=None,
        help="Provide a git branch or tag if a custom spack branch " +
             "should be checked out as a step in each generated job. " +
             "This argument is ignored if no --spack-repo is provided.")
    generate.add_argument(
        '--optimize', action='store_true', default=False,
        help="(Experimental) run the generated document through a series of "
@@ -82,8 +73,6 @@ def ci_generate(args):

    output_file = args.output_file
    copy_yaml_to = args.copy_to
    spack_repo = args.spack_repo
    spack_ref = args.spack_ref
    run_optimizer = args.optimize
    use_dependencies = args.dependencies

@@ -97,8 +86,7 @@ def ci_generate(args):

    # Generate the jobs
    spack_ci.generate_gitlab_ci_yaml(
        env, True, output_file, spack_repo, spack_ref,
        run_optimizer=run_optimizer,
        env, True, output_file, run_optimizer=run_optimizer,
        use_dependencies=use_dependencies)

    if copy_yaml_to:
@@ -249,8 +237,11 @@ def ci_rebuild(args):

    # Make a copy of the environment file, so we can overwrite the changed
    # version in between the two invocations of "spack install"
    env_src_path = os.path.join(current_directory, 'spack.yaml')
    env_dst_path = os.path.join(current_directory, 'spack.yaml_BACKUP')
    env_src_path = env.manifest_path
    env_dirname = os.path.dirname(env_src_path)
    env_filename = os.path.basename(env_src_path)
    env_copyname = '{0}_BACKUP'.format(env_filename)
    env_dst_path = os.path.join(env_dirname, env_copyname)
    shutil.copyfile(env_src_path, env_dst_path)
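
The backup path is now derived from the environment's own manifest path
instead of the current working directory. A standalone sketch (the path
below is invented):

    import os

    manifest_path = '/tmp/my-env/spack.yaml'
    backup = os.path.join(
        os.path.dirname(manifest_path),
        '{0}_BACKUP'.format(os.path.basename(manifest_path)))
    assert backup == '/tmp/my-env/spack.yaml_BACKUP'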

    tty.debug('job concrete spec path: {0}'.format(job_spec_yaml_path))

@@ -339,8 +330,10 @@ def ci_rebuild(args):
            first_pass_args))
        spack_cmd(*first_pass_args)

        # Overwrite the changed environment file so it doesn't
        # Overwrite the changed environment file so it doesn't break
        # the next install invocation.
        tty.debug('Copying {0} to {1}'.format(
            env_dst_path, env_src_path))
        shutil.copyfile(env_dst_path, env_src_path)

        second_pass_args = install_args + [

@@ -10,10 +10,11 @@
import llnl.util.tty as tty

import spack.caches
import spack.cmd
import spack.cmd.test
import spack.cmd.common.arguments as arguments
import spack.repo
import spack.stage
import spack.config
from spack.paths import lib_path, var_path


@@ -275,3 +275,53 @@ def no_checksum():
    return Args(
        '-n', '--no-checksum', action='store_true', default=False,
        help="do not use checksums to verify downloaded files (unsafe)")


def add_cdash_args(subparser, add_help):
    cdash_help = {}
    if add_help:
        cdash_help['upload-url'] = "CDash URL where reports will be uploaded"
        cdash_help['build'] = """The name of the build that will be reported to CDash.
Defaults to spec of the package to operate on."""
        cdash_help['site'] = """The site name that will be reported to CDash.
Defaults to current system hostname."""
        cdash_help['track'] = """Results will be reported to this group on CDash.
Defaults to Experimental."""
        cdash_help['buildstamp'] = """Instead of letting the CDash reporter prepare the
buildstamp which, when combined with build name, site and project,
uniquely identifies the build, provide this argument to identify
the build yourself. Format: %%Y%%m%%d-%%H%%M-[cdash-track]"""
    else:
        cdash_help['upload-url'] = argparse.SUPPRESS
        cdash_help['build'] = argparse.SUPPRESS
        cdash_help['site'] = argparse.SUPPRESS
        cdash_help['track'] = argparse.SUPPRESS
        cdash_help['buildstamp'] = argparse.SUPPRESS

    subparser.add_argument(
        '--cdash-upload-url',
        default=None,
        help=cdash_help['upload-url']
    )
    subparser.add_argument(
        '--cdash-build',
        default=None,
        help=cdash_help['build']
    )
    subparser.add_argument(
        '--cdash-site',
        default=None,
        help=cdash_help['site']
    )

    cdash_subgroup = subparser.add_mutually_exclusive_group()
    cdash_subgroup.add_argument(
        '--cdash-track',
        default='Experimental',
        help=cdash_help['track']
    )
    cdash_subgroup.add_argument(
        '--cdash-buildstamp',
        default=None,
        help=cdash_help['buildstamp']
    )

lib/spack/spack/cmd/common/env_utility.py (new file, 82 lines)
@@ -0,0 +1,82 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from __future__ import print_function

import argparse
import os

import llnl.util.tty as tty
import spack.build_environment as build_environment
import spack.paths
import spack.cmd
import spack.cmd.common.arguments as arguments
from spack.util.environment import dump_environment, pickle_environment


def setup_parser(subparser):
    arguments.add_common_arguments(subparser, ['clean', 'dirty'])
    subparser.add_argument(
        '--dump', metavar="FILE",
        help="dump a source-able environment to FILE"
    )
    subparser.add_argument(
        '--pickle', metavar="FILE",
        help="dump a pickled source-able environment to FILE"
    )
    subparser.add_argument(
        'spec', nargs=argparse.REMAINDER,
        metavar='spec [--] [cmd]...',
        help="specs of package environment to emulate")
    subparser.epilog\
        = 'If a command is not specified, the environment will be printed ' \
        'to standard output (cf /usr/bin/env) unless --dump and/or --pickle ' \
        'are specified.\n\nIf a command is specified and spec is ' \
        'multi-word, then the -- separator is obligatory.'


def emulate_env_utility(cmd_name, context, args):
    if not args.spec:
        tty.die("spack %s requires a spec." % cmd_name)

    # Specs may have spaces in them, so if they do, require that the
    # caller put a '--' between the spec and the command to be
    # executed. If there is no '--', assume that the spec is the
    # first argument.
    sep = '--'
    if sep in args.spec:
        s = args.spec.index(sep)
        spec = args.spec[:s]
        cmd = args.spec[s + 1:]
    else:
        spec = args.spec[0]
        cmd = args.spec[1:]

    specs = spack.cmd.parse_specs(spec, concretize=True)
    if len(specs) > 1:
        tty.die("spack %s only takes one spec." % cmd_name)
    spec = specs[0]

    build_environment.setup_package(spec.package, args.dirty, context)

    if args.dump:
        # Dump a source-able environment to a text file.
        tty.msg("Dumping a source-able environment to {0}".format(args.dump))
        dump_environment(args.dump)

    if args.pickle:
        # Dump a source-able environment to a pickle file.
        tty.msg(
            "Pickling a source-able environment to {0}".format(args.pickle))
        pickle_environment(args.pickle)

    if cmd:
        # Execute the command with the new environment
        os.execvp(cmd[0], cmd)

    elif not bool(args.pickle or args.dump):
        # If no command or dump/pickle option act like the "env" command
        # and print out env vars.
        for key, val in os.environ.items():
            print("%s=%s" % (key, val))

@@ -2,16 +2,19 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import collections
import os
import re
import shutil

import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack.config
import spack.cmd.common.arguments
import spack.schema.env
import spack.environment as ev
import spack.schema.packages
import spack.util.spack_yaml as syaml
from spack.util.editor import editor

@@ -80,6 +83,19 @@ def setup_parser(subparser):
    # Make the add parser available later
    setup_parser.add_parser = add_parser

    update = sp.add_parser(
        'update', help='update configuration files to the latest format'
    )
    spack.cmd.common.arguments.add_common_arguments(update, ['yes_to_all'])
    update.add_argument('section', help='section to update')

    revert = sp.add_parser(
        'revert',
        help='revert configuration files to their state before update'
    )
    spack.cmd.common.arguments.add_common_arguments(revert, ['yes_to_all'])
    revert.add_argument('section', help='section to update')


def _get_scope_and_section(args):
    """Extract config scope and section from arguments."""
@@ -161,14 +177,6 @@ def config_list(args):
    print(' '.join(list(spack.config.section_schemas)))


def set_config(args, section, new, scope):
    if re.match(r'env.*', scope):
        e = ev.get_env(args, 'config add')
        e.set_config(section, new)
    else:
        spack.config.set(section, new, scope=scope)


def config_add(args):
    """Add the given configuration to the specified config scope

@@ -200,7 +208,7 @@ def config_add(args):
        existing = spack.config.get(section, scope=scope)
        new = spack.config.merge_yaml(existing, value)

        set_config(args, section, new, scope)
        spack.config.set(section, new, scope)

    if args.path:
        components = spack.config.process_config_path(args.path)
@@ -244,7 +252,7 @@ def config_add(args):

        # merge value into existing
        new = spack.config.merge_yaml(existing, value)
        set_config(args, path, new, scope)
        spack.config.set(path, new, scope)


def config_remove(args):
@@ -272,15 +280,167 @@ def config_remove(args):
        # This should be impossible to reach
        raise spack.config.ConfigError('Config has nested non-dict values')

    set_config(args, path, existing, scope)
    spack.config.set(path, existing, scope)


def _can_update_config_file(scope_dir, cfg_file):
    dir_ok = fs.can_write_to_dir(scope_dir)
    cfg_ok = fs.can_access(cfg_file)
    return dir_ok and cfg_ok
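
A rough stdlib-only equivalent of the permission check above (the
llnl.util.filesystem helpers are assumed to behave roughly like this):

    import os

    def can_update_config_file(scope_dir, cfg_file):
        dir_ok = os.access(scope_dir, os.W_OK)
        cfg_ok = (not os.path.exists(cfg_file)
                  or os.access(cfg_file, os.W_OK))
        return dir_ok and cfg_ok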


def config_update(args):
    # Read the configuration files
    spack.config.config.get_config(args.section, scope=args.scope)
    updates = spack.config.config.format_updates[args.section]

    cannot_overwrite, skip_system_scope = [], False
    for scope in updates:
        cfg_file = spack.config.config.get_config_filename(
            scope.name, args.section
        )
        scope_dir = scope.path
        can_be_updated = _can_update_config_file(scope_dir, cfg_file)
        if not can_be_updated:
            if scope.name == 'system':
                skip_system_scope = True
                msg = ('Not enough permissions to write to "system" scope. '
                       'Skipping update at that location [cfg={0}]')
                tty.warn(msg.format(cfg_file))
                continue
            cannot_overwrite.append((scope, cfg_file))

    if cannot_overwrite:
        msg = 'Detected permission issues with the following scopes:\n\n'
        for scope, cfg_file in cannot_overwrite:
            msg += '\t[scope={0}, cfg={1}]\n'.format(scope.name, cfg_file)
        msg += ('\nEither ensure that you have sufficient permissions to '
                'modify these files or do not include these scopes in the '
                'update.')
        tty.die(msg)

    if skip_system_scope:
        updates = [x for x in updates if x.name != 'system']

    # Report if there are no updates to be done
    if not updates:
        msg = 'No updates needed for "{0}" section.'
        tty.msg(msg.format(args.section))
        return

    proceed = True
    if not args.yes_to_all:
        msg = ('The following configuration files are going to be updated to'
               ' the latest schema format:\n\n')
        for scope in updates:
            cfg_file = spack.config.config.get_config_filename(
                scope.name, args.section
            )
            msg += '\t[scope={0}, file={1}]\n'.format(scope.name, cfg_file)
        msg += ('\nIf the configuration files are updated, versions of Spack '
                'that are older than this version may not be able to read '
                'them. Spack stores backups of the updated files which can '
                'be retrieved with "spack config revert"')
        tty.msg(msg)
        proceed = tty.get_yes_or_no('Do you want to proceed?', default=False)

    if not proceed:
        tty.die('Operation aborted.')

    # Get a function to update the format
    update_fn = spack.config.ensure_latest_format_fn(args.section)
    for scope in updates:
        cfg_file = spack.config.config.get_config_filename(
            scope.name, args.section
        )
        with open(cfg_file) as f:
            data = syaml.load_config(f) or {}
            data = data.pop(args.section, {})
        update_fn(data)

        # Make a backup copy and rewrite the file
        bkp_file = cfg_file + '.bkp'
        shutil.copy(cfg_file, bkp_file)
        spack.config.config.update_config(
            args.section, data, scope=scope.name, force=True
        )
        msg = 'File "{0}" updated [backup={1}]'
        tty.msg(msg.format(cfg_file, bkp_file))
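
The load / migrate / back up / rewrite flow above, reduced to its skeleton
(update_in_place, load, and save are placeholders, not Spack APIs):

    import shutil

    def update_in_place(cfg_file, update_fn, load, save):
        data = load(cfg_file)
        update_fn(data)                            # migrate to latest schema
        shutil.copy(cfg_file, cfg_file + '.bkp')   # keep a revert point
        save(cfg_file, data)
        return cfg_file + '.bkp'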


def _can_revert_update(scope_dir, cfg_file, bkp_file):
    dir_ok = fs.can_write_to_dir(scope_dir)
    cfg_ok = not os.path.exists(cfg_file) or fs.can_access(cfg_file)
    bkp_ok = fs.can_access(bkp_file)
    return dir_ok and cfg_ok and bkp_ok


def config_revert(args):
    scopes = [args.scope] if args.scope else [
        x.name for x in spack.config.config.file_scopes
    ]

    # Search for backup files in the configuration scopes
    Entry = collections.namedtuple('Entry', ['scope', 'cfg', 'bkp'])
    to_be_restored, cannot_overwrite = [], []
    for scope in scopes:
        cfg_file = spack.config.config.get_config_filename(scope, args.section)
        bkp_file = cfg_file + '.bkp'

        # If the backup file doesn't exist, move to the next scope
        if not os.path.exists(bkp_file):
            continue

        # If it exists and we don't have write access in this scope
        # keep track of it and report a comprehensive error later
        entry = Entry(scope, cfg_file, bkp_file)
        scope_dir = os.path.dirname(bkp_file)
        can_be_reverted = _can_revert_update(scope_dir, cfg_file, bkp_file)
        if not can_be_reverted:
            cannot_overwrite.append(entry)
            continue

        to_be_restored.append(entry)

    # Report errors if we can't revert a configuration
    if cannot_overwrite:
        msg = 'Detected permission issues with the following scopes:\n\n'
        for e in cannot_overwrite:
            msg += '\t[scope={0.scope}, cfg={0.cfg}, bkp={0.bkp}]\n'.format(e)
        msg += ('\nEither ensure that you have the right permissions before '
                'retrying or be more specific on the scope to revert.')
        tty.die(msg)

    proceed = True
    if not args.yes_to_all:
        msg = ('The following scopes will be restored from the corresponding'
               ' backup files:\n')
        for entry in to_be_restored:
            msg += '\t[scope={0.scope}, bkp={0.bkp}]\n'.format(entry)
        msg += 'This operation cannot be undone.'
        tty.msg(msg)
        proceed = tty.get_yes_or_no('Do you want to proceed?', default=False)

    if not proceed:
        tty.die('Operation aborted.')

    for _, cfg_file, bkp_file in to_be_restored:
        shutil.copy(bkp_file, cfg_file)
        os.unlink(bkp_file)
        msg = 'File "{0}" reverted to old state'
        tty.msg(msg.format(cfg_file))


def config(parser, args):
    action = {'get': config_get,
              'blame': config_blame,
              'edit': config_edit,
              'list': config_list,
              'add': config_add,
              'rm': config_remove,
              'remove': config_remove}
    action = {
        'get': config_get,
        'blame': config_blame,
        'edit': config_edit,
        'list': config_list,
        'add': config_add,
        'rm': config_remove,
        'remove': config_remove,
        'update': config_update,
        'revert': config_revert
    }
    action[args.config_command](args)

@@ -204,6 +204,17 @@ def qmake_args(self):
        return args"""


class MavenPackageTemplate(PackageTemplate):
    """Provides appropriate overrides for Maven-based packages"""

    base_class_name = 'MavenPackage'

    body_def = """\
    def build(self, spec, prefix):
        # FIXME: If not needed delete this function
        pass"""


class SconsPackageTemplate(PackageTemplate):
    """Provides appropriate overrides for SCons-based packages"""

@@ -352,6 +363,34 @@ def __init__(self, name, *args, **kwargs):
        super(OctavePackageTemplate, self).__init__(name, *args, **kwargs)


class RubyPackageTemplate(PackageTemplate):
    """Provides appropriate overrides for Ruby packages"""

    base_class_name = 'RubyPackage'

    dependencies = """\
    # FIXME: Add dependencies if required. Only add the ruby dependency
    # if you need specific versions. A generic ruby dependency is
    # added implicitly by the RubyPackage class.
    # depends_on('ruby@X.Y.Z:', type=('build', 'run'))
    # depends_on('ruby-foo', type=('build', 'run'))"""

    body_def = """\
    def build(self, spec, prefix):
        # FIXME: If not needed delete this function
        pass"""

    def __init__(self, name, *args, **kwargs):
        # If the user provided `--name ruby-numpy`, don't rename it
        # ruby-ruby-numpy
        if not name.startswith('ruby-'):
            # Make it more obvious that we are renaming the package
            tty.msg("Changing package name from {0} to ruby-{0}".format(name))
            name = 'ruby-{0}'.format(name)

        super(RubyPackageTemplate, self).__init__(name, *args, **kwargs)


class MakefilePackageTemplate(PackageTemplate):
    """Provides appropriate overrides for Makefile packages"""

@@ -402,6 +441,7 @@ def __init__(self, name, *args, **kwargs):
    'cmake': CMakePackageTemplate,
    'bundle': BundlePackageTemplate,
    'qmake': QMakePackageTemplate,
    'maven': MavenPackageTemplate,
    'scons': SconsPackageTemplate,
    'waf': WafPackageTemplate,
    'bazel': BazelPackageTemplate,
@@ -410,6 +450,7 @@ def __init__(self, name, *args, **kwargs):
    'perlmake': PerlmakePackageTemplate,
    'perlbuild': PerlbuildPackageTemplate,
    'octave': OctavePackageTemplate,
    'ruby': RubyPackageTemplate,
    'makefile': MakefilePackageTemplate,
    'intel': IntelPackageTemplate,
    'meson': MesonPackageTemplate,
@@ -445,6 +486,9 @@ def setup_parser(subparser):
    subparser.add_argument(
        '--skip-editor', action='store_true',
        help="skip the edit session for the package (e.g., automation)")
    subparser.add_argument(
        '-b', '--batch', action='store_true',
        help="don't ask which versions to checksum")


class BuildSystemGuesser:
@@ -461,12 +505,16 @@ def __call__(self, stage, url):
        """Try to guess the type of build system used by a project based on
        the contents of its archive or the URL it was downloaded from."""

        # Most octave extensions are hosted on Octave-Forge:
        #     https://octave.sourceforge.net/index.html
        # They all have the same base URL.
        if url is not None and 'downloads.sourceforge.net/octave/' in url:
            self.build_system = 'octave'
            return
        if url is not None:
            # Most octave extensions are hosted on Octave-Forge:
            #     https://octave.sourceforge.net/index.html
            # They all have the same base URL.
            if 'downloads.sourceforge.net/octave/' in url:
                self.build_system = 'octave'
                return
            if url.endswith('.gem'):
                self.build_system = 'ruby'
                return

        # A list of clues that give us an idea of the build system a package
        # uses. If the regular expression matches a file contained in the
@@ -479,12 +527,16 @@ def __call__(self, stage, url):
            (r'/configure$', 'autotools'),
            (r'/configure\.(in|ac)$', 'autoreconf'),
            (r'/Makefile\.am$', 'autoreconf'),
            (r'/pom\.xml$', 'maven'),
            (r'/SConstruct$', 'scons'),
            (r'/waf$', 'waf'),
            (r'/setup\.py$', 'python'),
            (r'/WORKSPACE$', 'bazel'),
            (r'/Build\.PL$', 'perlbuild'),
            (r'/Makefile\.PL$', 'perlmake'),
            (r'/.*\.gemspec$', 'ruby'),
            (r'/Rakefile$', 'ruby'),
            (r'/setup\.rb$', 'ruby'),
            (r'/.*\.pro$', 'qmake'),
            (r'/(GNU)?[Mm]akefile$', 'makefile'),
            (r'/DESCRIPTION$', 'octave'),
@@ -511,7 +563,7 @@ def __call__(self, stage, url):
        # Determine the build system based on the files contained
        # in the archive.
        for pattern, bs in clues:
            if any(re.search(pattern, l) for l in lines):
            if any(re.search(pattern, line) for line in lines):
                self.build_system = bs
                break
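
A toy run of the clue-matching loop above (the clue subset and the archive
file listing are invented for illustration):

    import re

    clues = [(r'/pom\.xml$', 'maven'), (r'/SConstruct$', 'scons'),
             (r'/setup\.py$', 'python')]
    lines = ['pkg-1.0/README', 'pkg-1.0/pom.xml', 'pkg-1.0/src/Main.java']
    build_system = 'generic'
    for pattern, bs in clues:
        if any(re.search(pattern, line) for line in lines):
            build_system = bs
            break
    assert build_system == 'maven'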

@@ -629,7 +681,8 @@ def get_versions(args, name):

        versions = spack.stage.get_checksums_for_versions(
            url_dict, name, first_stage_function=guesser,
            keep_stage=args.keep_stage, batch=True)
            keep_stage=args.keep_stage,
            batch=(args.batch or len(url_dict) == 1))
    else:
        versions = unhashed_versions

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import shutil
import sys
from collections import namedtuple

@@ -14,6 +15,7 @@

import spack.config
import spack.schema.env
import spack.cmd.common.arguments
import spack.cmd.install
import spack.cmd.uninstall
import spack.cmd.modules
@@ -37,6 +39,8 @@
    ['status', 'st'],
    'loads',
    'view',
    'update',
    'revert'
]


@@ -351,6 +355,9 @@ def env_status(args):
                % (ev.manifest_name, env.path))
        else:
            tty.msg('In environment %s' % env.name)

        # Check if environment views can be safely activated
        env.check_views()
    else:
        tty.msg('No active environment')

@@ -391,6 +398,80 @@ def env_loads(args):
    print('   source %s' % loads_file)


def env_update_setup_parser(subparser):
    """update environments to the latest format"""
    subparser.add_argument(
        metavar='env', dest='env',
        help='name or directory of the environment to activate'
    )
    spack.cmd.common.arguments.add_common_arguments(subparser, ['yes_to_all'])


def env_update(args):
    manifest_file = ev.manifest_file(args.env)
    backup_file = manifest_file + ".bkp"
    needs_update = not ev.is_latest_format(manifest_file)

    if not needs_update:
        tty.msg('No update needed for the environment "{0}"'.format(args.env))
        return

    proceed = True
    if not args.yes_to_all:
        msg = ('The environment "{0}" is going to be updated to the latest '
               'schema format.\nIf the environment is updated, versions of '
               'Spack that are older than this version may not be able to '
               'read it. Spack stores backups of the updated environment '
               'which can be retrieved with "spack env revert"')
        tty.msg(msg.format(args.env))
        proceed = tty.get_yes_or_no('Do you want to proceed?', default=False)

    if not proceed:
        tty.die('Operation aborted.')

    ev.update_yaml(manifest_file, backup_file=backup_file)
    msg = 'Environment "{0}" has been updated [backup={1}]'
    tty.msg(msg.format(args.env, backup_file))


def env_revert_setup_parser(subparser):
    """restore environments to their state before update"""
    subparser.add_argument(
        metavar='env', dest='env',
        help='name or directory of the environment to activate'
    )
    spack.cmd.common.arguments.add_common_arguments(subparser, ['yes_to_all'])


def env_revert(args):
    manifest_file = ev.manifest_file(args.env)
    backup_file = manifest_file + ".bkp"

    # Check that both the spack.yaml and the backup exist, then inform the
    # user what is going to happen and ask for confirmation
    if not os.path.exists(manifest_file):
        msg = 'cannot find the manifest file of the environment [file={0}]'
        tty.die(msg.format(manifest_file))
    if not os.path.exists(backup_file):
        msg = 'cannot find the old manifest file to be restored [file={0}]'
        tty.die(msg.format(backup_file))

    proceed = True
    if not args.yes_to_all:
        msg = ('Spack is going to overwrite the current manifest file'
               ' with a backup copy [manifest={0}, backup={1}]')
        tty.msg(msg.format(manifest_file, backup_file))
        proceed = tty.get_yes_or_no('Do you want to proceed?', default=False)

    if not proceed:
        tty.die('Operation aborted.')

    shutil.copy(backup_file, manifest_file)
    os.remove(backup_file)
    msg = 'Environment "{0}" reverted to old state'
    tty.msg(msg.format(manifest_file))
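
Together, update and revert behave like a one-slot undo for the manifest.
A file-level sketch of the revert half (the path handling mirrors the code
above; the function name is invented):

    import os
    import shutil

    def revert_manifest(manifest_file):
        backup_file = manifest_file + '.bkp'
        shutil.copy(backup_file, manifest_file)
        os.remove(backup_file)   # backup consumed: a second revert is impossible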


#: Dictionary mapping subcommand names and aliases to functions
subcommand_functions = {}

@@ -2,22 +2,25 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function
from collections import defaultdict, namedtuple

import argparse
import os
import re
import six
import sys
from collections import defaultdict, namedtuple

import spack
import spack.error
import llnl.util.tty as tty
import spack.util.spack_yaml as syaml
import spack.util.environment
import llnl.util.filesystem
import llnl.util.tty as tty
import llnl.util.tty.colify as colify
import six

import spack
import spack.cmd
import spack.error
import spack.util.environment
import spack.util.spack_yaml as syaml

description = "add external packages to Spack configuration"
description = "manage external packages in Spack configuration"
section = "config"
level = "short"

@@ -26,12 +29,25 @@ def setup_parser(subparser):
    sp = subparser.add_subparsers(
        metavar='SUBCOMMAND', dest='external_command')

    find_parser = sp.add_parser('find', help=external_find.__doc__)
    scopes = spack.config.scopes()
    scopes_metavar = spack.config.scopes_metavar

    find_parser = sp.add_parser(
        'find', help='add external packages to packages.yaml'
    )
    find_parser.add_argument(
        '--not-buildable', action='store_true', default=False,
        help="packages with detected externals won't be built with Spack")
    find_parser.add_argument(
        '--scope', choices=scopes, metavar=scopes_metavar,
        default=spack.config.default_modify_scope('packages'),
        help="configuration scope to modify")
    find_parser.add_argument('packages', nargs=argparse.REMAINDER)

    sp.add_parser(
        'list', help='list detectable packages, by repository and name'
    )


def is_executable(path):
    return os.path.isfile(path) and os.access(path, os.X_OK)
@@ -74,19 +90,37 @@ def _generate_pkg_config(external_pkg_entries):
    This does not generate the entire packages.yaml. For example, given some
    external entries for the CMake package, this could return::

        { 'paths': {
            'cmake@3.17.1': '/opt/cmake-3.17.1/',
            'cmake@3.16.5': '/opt/cmake-3.16.5/'
          }
        {
            'externals': [{
                'spec': 'cmake@3.17.1',
                'prefix': '/opt/cmake-3.17.1/'
            }, {
                'spec': 'cmake@3.16.5',
                'prefix': '/opt/cmake-3.16.5/'
            }]
        }
    """
    paths_dict = syaml.syaml_dict()

    pkg_dict = syaml.syaml_dict()
    pkg_dict['externals'] = []
    for e in external_pkg_entries:
        if not _spec_is_valid(e.spec):
            continue
        paths_dict[str(e.spec)] = e.base_dir
    pkg_dict = syaml.syaml_dict()
    pkg_dict['paths'] = paths_dict

        external_items = [('spec', str(e.spec)), ('prefix', e.base_dir)]
        if e.spec.external_modules:
            external_items.append(('modules', e.spec.external_modules))

        if e.spec.extra_attributes:
            external_items.append(
                ('extra_attributes',
                 syaml.syaml_dict(e.spec.extra_attributes.items()))
            )

        # external_items.extend(e.spec.extra_attributes.items())
        pkg_dict['externals'].append(
            syaml.syaml_dict(external_items)
        )

    return pkg_dict
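
Building the new 'externals' shape with plain dicts (the entries are
invented; Spack's syaml.syaml_dict is treated here as an ordered dict):

    entries = [('cmake@3.17.1', '/opt/cmake-3.17.1/'),
               ('cmake@3.16.5', '/opt/cmake-3.16.5/')]
    pkg_dict = {'externals': [{'spec': s, 'prefix': p} for s, p in entries]}
    assert pkg_dict['externals'][0] == {'spec': 'cmake@3.17.1',
                                        'prefix': '/opt/cmake-3.17.1/'}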
|
||||
|
||||
@@ -120,7 +154,17 @@ def external_find(args):
|
||||
packages_to_check = spack.repo.path.all_packages()
|
||||
|
||||
pkg_to_entries = _get_external_packages(packages_to_check)
|
||||
_update_pkg_config(pkg_to_entries, args.not_buildable)
|
||||
new_entries = _update_pkg_config(
|
||||
args.scope, pkg_to_entries, args.not_buildable
|
||||
)
|
||||
if new_entries:
|
||||
path = spack.config.config.get_config_filename(args.scope, 'packages')
|
||||
msg = ('The following specs have been detected on this system '
|
||||
'and added to {0}')
|
||||
tty.msg(msg.format(path))
|
||||
spack.cmd.display_specs(new_entries)
|
||||
else:
|
||||
tty.msg('No new external packages detected')
|
||||
|
||||
|
||||
def _group_by_prefix(paths):
|
||||
@@ -162,32 +206,34 @@ def _get_predefined_externals():
|
||||
pkg_config = spack.config.get('packages')
|
||||
already_defined_specs = set()
|
||||
for pkg_name, per_pkg_cfg in pkg_config.items():
|
||||
paths = per_pkg_cfg.get('paths', {})
|
||||
already_defined_specs.update(spack.spec.Spec(k) for k in paths)
|
||||
modules = per_pkg_cfg.get('modules', {})
|
||||
already_defined_specs.update(spack.spec.Spec(k) for k in modules)
|
||||
for item in per_pkg_cfg.get('externals', []):
|
||||
already_defined_specs.add(spack.spec.Spec(item['spec']))
|
||||
return already_defined_specs
|
||||
|
||||
|
||||
def _update_pkg_config(pkg_to_entries, not_buildable):
|
||||
def _update_pkg_config(scope, pkg_to_entries, not_buildable):
|
||||
predefined_external_specs = _get_predefined_externals()
|
||||
|
||||
pkg_to_cfg = {}
|
||||
pkg_to_cfg, all_new_specs = {}, []
|
||||
for pkg_name, ext_pkg_entries in pkg_to_entries.items():
|
||||
new_entries = list(
|
||||
e for e in ext_pkg_entries
|
||||
if (e.spec not in predefined_external_specs))
|
||||
|
||||
pkg_config = _generate_pkg_config(new_entries)
|
||||
all_new_specs.extend([
|
||||
spack.spec.Spec(x['spec']) for x in pkg_config.get('externals', [])
|
||||
])
|
||||
if not_buildable:
|
||||
pkg_config['buildable'] = False
|
||||
pkg_to_cfg[pkg_name] = pkg_config
|
||||
|
||||
cfg_scope = spack.config.default_modify_scope()
|
||||
pkgs_cfg = spack.config.get('packages', scope=cfg_scope)
|
||||
pkgs_cfg = spack.config.get('packages', scope=scope)
|
||||
|
||||
spack.config.merge_yaml(pkgs_cfg, pkg_to_cfg)
|
||||
spack.config.set('packages', pkgs_cfg, scope=cfg_scope)
|
||||
spack.config.set('packages', pkgs_cfg, scope=scope)
|
||||
|
||||
return all_new_specs
|
||||
|
||||
|
||||
def _get_external_packages(packages_to_check, system_path_to_exe=None):
|
||||
@@ -234,7 +280,7 @@ def _get_external_packages(packages_to_check, system_path_to_exe=None):
|
||||
|
||||
if not specs:
|
||||
tty.debug(
|
||||
'The following executables in {0} were decidedly not'
|
||||
'The following executables in {0} were decidedly not '
|
||||
'part of the package {1}: {2}'
|
||||
.format(prefix, pkg.name, ', '.join(exes_in_prefix))
|
||||
)
|
||||
@@ -259,13 +305,33 @@ def _get_external_packages(packages_to_check, system_path_to_exe=None):
|
||||
else:
|
||||
resolved_specs[spec] = prefix
|
||||
|
||||
try:
|
||||
spec.validate_detection()
|
||||
except Exception as e:
|
||||
msg = ('"{0}" has been detected on the system but will '
|
||||
'not be added to packages.yaml [reason={1}]')
|
||||
tty.warn(msg.format(spec, str(e)))
|
||||
continue
|
||||
|
||||
if spec.external_path:
|
||||
pkg_prefix = spec.external_path
|
||||
|
||||
pkg_to_entries[pkg.name].append(
|
||||
ExternalPackageEntry(spec=spec, base_dir=pkg_prefix))
|
||||
|
||||
return pkg_to_entries
|
||||
|
||||
|
||||
def external(parser, args):
|
||||
action = {'find': external_find}
|
||||
def external_list(args):
|
||||
# Trigger a read of all packages, might take a long time.
|
||||
list(spack.repo.path.all_packages())
|
||||
# Print all the detectable packages
|
||||
tty.msg("Detectable packages per repository")
|
||||
for namespace, pkgs in sorted(spack.package.detectable_packages.items()):
|
||||
print("Repository:", namespace)
|
||||
colify.colify(pkgs, indent=4, output=sys.stdout)
|
||||
|
||||
|
||||
def external(parser, args):
|
||||
action = {'find': external_find, 'list': external_list}
|
||||
action[args.external_command](args)
|
||||
|
||||
@@ -35,6 +35,10 @@
|
||||
@g{%compiler@version} build with specific compiler version
|
||||
@g{%compiler@min:max} specific version range (see above)
|
||||
|
||||
compiler flags:
|
||||
@g{cflags="flags"} cppflags, cflags, cxxflags,
|
||||
fflags, ldflags, ldlibs
|
||||
|
||||
variants:
|
||||
@B{+variant} enable <variant>
|
||||
@r{-variant} or @r{~variant} disable <variant>
|
||||
@@ -42,7 +46,7 @@
|
||||
@B{variant=value1,value2,value3} set multi-value <variant> values
|
||||
|
||||
architecture variants:
|
||||
@m{platform=platform} linux, darwin, cray, bgq, etc.
|
||||
@m{platform=platform} linux, darwin, cray, etc.
|
||||
@m{os=operating_system} specific <operating_system>
|
||||
@m{target=target} specific <target> processor
|
||||
@m{arch=platform-os-target} shortcut for all three above
|
||||
|
||||
@@ -160,60 +160,10 @@ def setup_parser(subparser):
|
||||
action='store_true',
|
||||
help="Show usage instructions for CDash reporting"
|
||||
)
|
||||
add_cdash_args(subparser, False)
|
||||
arguments.add_cdash_args(subparser, False)
|
||||
arguments.add_common_arguments(subparser, ['yes_to_all', 'spec'])
|
||||
|
||||
|
||||
def add_cdash_args(subparser, add_help):
|
||||
cdash_help = {}
|
||||
if add_help:
|
||||
cdash_help['upload-url'] = "CDash URL where reports will be uploaded"
|
||||
cdash_help['build'] = """The name of the build that will be reported to CDash.
|
||||
Defaults to spec of the package to install."""
|
||||
cdash_help['site'] = """The site name that will be reported to CDash.
|
||||
Defaults to current system hostname."""
|
||||
cdash_help['track'] = """Results will be reported to this group on CDash.
|
||||
Defaults to Experimental."""
|
||||
cdash_help['buildstamp'] = """Instead of letting the CDash reporter prepare the
|
||||
buildstamp which, when combined with build name, site and project,
|
||||
uniquely identifies the build, provide this argument to identify
|
||||
the build yourself. Format: %%Y%%m%%d-%%H%%M-[cdash-track]"""
|
||||
else:
|
||||
cdash_help['upload-url'] = argparse.SUPPRESS
|
||||
cdash_help['build'] = argparse.SUPPRESS
|
||||
cdash_help['site'] = argparse.SUPPRESS
|
||||
cdash_help['track'] = argparse.SUPPRESS
|
||||
cdash_help['buildstamp'] = argparse.SUPPRESS
|
||||
|
||||
subparser.add_argument(
|
||||
'--cdash-upload-url',
|
||||
default=None,
|
||||
help=cdash_help['upload-url']
|
||||
)
|
||||
subparser.add_argument(
|
||||
'--cdash-build',
|
||||
default=None,
|
||||
help=cdash_help['build']
|
||||
)
|
||||
subparser.add_argument(
|
||||
'--cdash-site',
|
||||
default=None,
|
||||
help=cdash_help['site']
|
||||
)
|
||||
|
||||
cdash_subgroup = subparser.add_mutually_exclusive_group()
|
||||
cdash_subgroup.add_argument(
|
||||
'--cdash-track',
|
||||
default='Experimental',
|
||||
help=cdash_help['track']
|
||||
)
|
||||
cdash_subgroup.add_argument(
|
||||
'--cdash-buildstamp',
|
||||
default=None,
|
||||
help=cdash_help['buildstamp']
|
||||
)
|
||||
|
||||
|
||||
def default_log_file(spec):
|
||||
"""Computes the default filename for the log file and creates
|
||||
the corresponding directory if not present
|
||||
@@ -263,7 +213,7 @@ def install(parser, args, **kwargs):
|
||||
SPACK_CDASH_AUTH_TOKEN
|
||||
authentication token to present to CDash
|
||||
'''))
|
||||
add_cdash_args(parser, True)
|
||||
arguments.add_cdash_args(parser, True)
|
||||
parser.print_help()
|
||||
return
|
||||
|
||||
@@ -313,7 +263,8 @@ def install(parser, args, **kwargs):
|
||||
tty.warn("Deprecated option: --run-tests: use --test=all instead")
|
||||
|
||||
# 1. Abstract specs from cli
|
||||
reporter = spack.report.collect_info(args.log_format, args)
|
||||
reporter = spack.report.collect_info(
|
||||
spack.package.PackageInstaller, '_install_task', args.log_format, args)
|
||||
if args.log_file:
|
||||
reporter.filename = args.log_file
|
||||
|
||||
@@ -353,7 +304,7 @@ def install(parser, args, **kwargs):
|
||||
if not args.log_file and not reporter.filename:
|
||||
reporter.filename = default_log_file(specs[0])
|
||||
reporter.specs = specs
|
||||
with reporter:
|
||||
with reporter('build'):
|
||||
if args.overwrite:
|
||||
|
||||
installed = list(filter(lambda x: x,
|
||||
|
||||
@@ -54,6 +54,9 @@ def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'--update', metavar='FILE', default=None, action='store',
|
||||
help='write output to the specified file, if any package is newer')
|
||||
subparser.add_argument(
|
||||
'-v', '--virtuals', action='store_true', default=False,
|
||||
help='include virtual packages in list')
|
||||
|
||||
arguments.add_common_arguments(subparser, ['tags'])
|
||||
|
||||
@@ -267,7 +270,7 @@ def list(parser, args):
|
||||
formatter = formatters[args.format]
|
||||
|
||||
# Retrieve the names of all the packages
|
||||
pkgs = set(spack.repo.all_package_names())
|
||||
pkgs = set(spack.repo.all_package_names(args.virtuals))
|
||||
# Filter the set appropriately
|
||||
sorted_packages = filter_by_name(pkgs, args)
|
||||
|
||||
|
||||
@@ -70,6 +70,6 @@ def python(parser, args, unknown_args):
|
||||
# Provides readline support, allowing user to use arrow keys
|
||||
console.push('import readline')
|
||||
|
||||
console.interact("Spack version %s\nPython %s, %s %s"""
|
||||
console.interact("Spack version %s\nPython %s, %s %s"
|
||||
% (spack.spack_version, platform.python_version(),
|
||||
platform.system(), platform.machine()))
|
||||
|
||||
@@ -37,6 +37,7 @@ def setup_parser(subparser):
|
||||
|
||||
cd_group = subparser.add_mutually_exclusive_group()
|
||||
arguments.add_common_arguments(cd_group, ['clean', 'dirty'])
|
||||
subparser.epilog = 'DEPRECATED: use `spack dev-build` instead'
|
||||
|
||||
|
||||
def write_spconfig(package, dirty):
|
||||
@@ -98,6 +99,8 @@ def cmdlist(str):
|
||||
|
||||
|
||||
def setup(self, args):
|
||||
tty.warn('DEPRECATED: use `spack dev-build` instead')
|
||||
|
||||
if not args.spec:
|
||||
tty.die("spack setup requires a package spec argument.")
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ def setup_parser(subparser):
|
||||
const='yaml', help='print concrete spec as YAML')
|
||||
subparser.add_argument(
|
||||
'-j', '--json', action='store_const', dest='format', default=None,
|
||||
const='json', help='print concrete spec as YAML')
|
||||
const='json', help='print concrete spec as JSON')
|
||||
subparser.add_argument(
|
||||
'-c', '--cover', action='store',
|
||||
default='nodes', choices=['nodes', 'edges', 'paths'],
|
||||
|
||||
@@ -4,166 +4,319 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
from __future__ import print_function
|
||||
from __future__ import division
|
||||
|
||||
import collections
|
||||
import sys
|
||||
import re
|
||||
import os
|
||||
import argparse
|
||||
import pytest
|
||||
from six import StringIO
|
||||
import textwrap
|
||||
import fnmatch
|
||||
import re
|
||||
import shutil
|
||||
|
||||
import llnl.util.tty.color as color
|
||||
from llnl.util.filesystem import working_dir
|
||||
from llnl.util.tty.colify import colify
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.paths
|
||||
import spack.install_test
|
||||
import spack.environment as ev
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.report
import spack.package

description = "run spack's unit tests (wrapper around pytest)"
section = "developer"
description = "run spack's tests for an install"
section = "administrator"
level = "long"


def first_line(docstring):
    """Return the first line of the docstring."""
    return docstring.split('\n')[0]


def setup_parser(subparser):
    subparser.add_argument(
        '-H', '--pytest-help', action='store_true', default=False,
        help="show full pytest help, with advanced options")
    sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='test_command')

    # extra spack arguments to list tests
    list_group = subparser.add_argument_group("listing tests")
    list_mutex = list_group.add_mutually_exclusive_group()
    list_mutex.add_argument(
        '-l', '--list', action='store_const', default=None,
        dest='list', const='list', help="list test filenames")
    list_mutex.add_argument(
        '-L', '--list-long', action='store_const', default=None,
        dest='list', const='long', help="list all test functions")
    list_mutex.add_argument(
        '-N', '--list-names', action='store_const', default=None,
        dest='list', const='names', help="list full names of all tests")
    # Run
    run_parser = sp.add_parser('run', description=test_run.__doc__,
                               help=first_line(test_run.__doc__))

    # use tests for extension
    subparser.add_argument(
        '--extension', default=None,
        help="run test for a given spack extension")
    alias_help_msg = "Provide an alias for this test-suite"
    alias_help_msg += " for subsequent access."
    run_parser.add_argument('--alias', help=alias_help_msg)

    # spell out some common pytest arguments, so they'll show up in help
    pytest_group = subparser.add_argument_group(
        "common pytest arguments (spack test --pytest-help for more details)")
    pytest_group.add_argument(
        "-s", action='append_const', dest='parsed_args', const='-s',
        help="print output while tests run (disable capture)")
    pytest_group.add_argument(
        "-k", action='store', metavar="EXPRESSION", dest='expression',
        help="filter tests by keyword (can also use w/list options)")
    pytest_group.add_argument(
        "--showlocals", action='append_const', dest='parsed_args',
        const='--showlocals', help="show local variable values in tracebacks")
    run_parser.add_argument(
        '--fail-fast', action='store_true',
        help="Stop tests for each package after the first failure."
    )
    run_parser.add_argument(
        '--fail-first', action='store_true',
        help="Stop after the first failed package."
    )
    run_parser.add_argument(
        '--keep-stage',
        action='store_true',
        help='Keep testing directory for debugging'
    )
    run_parser.add_argument(
        '--log-format',
        default=None,
        choices=spack.report.valid_formats,
        help="format to be used for log files"
    )
    run_parser.add_argument(
        '--log-file',
        default=None,
        help="filename for the log file. if not passed a default will be used"
    )
    arguments.add_cdash_args(run_parser, False)
    run_parser.add_argument(
        '--help-cdash',
        action='store_true',
        help="Show usage instructions for CDash reporting"
    )

    # remainder is just passed to pytest
    subparser.add_argument(
        'pytest_args', nargs=argparse.REMAINDER, help="arguments for pytest")
    length_group = run_parser.add_mutually_exclusive_group()
    length_group.add_argument(
        '--smoke', action='store_true', dest='smoke_test', default=True,
        help='run smoke tests (default)')
    length_group.add_argument(
        '--capability', action='store_false', dest='smoke_test', default=True,
        help='run full capability tests using pavilion')

    cd_group = run_parser.add_mutually_exclusive_group()
    arguments.add_common_arguments(cd_group, ['clean', 'dirty'])

    arguments.add_common_arguments(run_parser, ['installed_specs'])

    # List
    list_parser = sp.add_parser('list', description=test_list.__doc__,
                                help=first_line(test_list.__doc__))
    list_parser.add_argument(
        'filter', nargs=argparse.REMAINDER,
        help='optional case-insensitive glob patterns to filter results.')

    # Find
    find_parser = sp.add_parser('find', description=test_find.__doc__,
                                help=first_line(test_find.__doc__))
    find_parser.add_argument(
        'filter', nargs=argparse.REMAINDER,
        help='optional case-insensitive glob patterns to filter results.')

    # Status
    status_parser = sp.add_parser('status', description=test_status.__doc__,
                                  help=first_line(test_status.__doc__))
    status_parser.add_argument(
        'names', nargs=argparse.REMAINDER,
        help="Test suites for which to print status")

    # Results
    results_parser = sp.add_parser('results', description=test_results.__doc__,
                                   help=first_line(test_results.__doc__))
    results_parser.add_argument(
        'names', nargs=argparse.REMAINDER,
        help="Test suites for which to print results")

    # Remove
    remove_parser = sp.add_parser('remove', description=test_remove.__doc__,
                                  help=first_line(test_remove.__doc__))
    arguments.add_common_arguments(remove_parser, ['yes_to_all'])
    remove_parser.add_argument(
        'names', nargs=argparse.REMAINDER,
        help="Test suites to remove from test stage")


def do_list(args, extra_args):
    """Print a more readable list of tests than what pytest offers."""
    # Run test collection and get the tree out.
    old_output = sys.stdout
    try:
        sys.stdout = output = StringIO()
        pytest.main(['--collect-only'] + extra_args)
    finally:
        sys.stdout = old_output
def test_run(args):
    """Run tests for the specified installed packages.

    lines = output.getvalue().split('\n')
    tests = collections.defaultdict(lambda: set())
    prefix = []

    # collect tests into sections
    for line in lines:
        match = re.match(r"(\s*)<([^ ]*) '([^']*)'", line)
        if not match:
            continue
        indent, nodetype, name = match.groups()

        # strip parametrized tests
        if "[" in name:
            name = name[:name.index("[")]

        depth = len(indent) // 2

        if nodetype.endswith("Function"):
            key = tuple(prefix)
            tests[key].add(name)
        else:
            prefix = prefix[:depth]
            prefix.append(name)

    def colorize(c, prefix):
        if isinstance(prefix, tuple):
            return "::".join(
                color.colorize("@%s{%s}" % (c, p))
                for p in prefix if p != "()"
            )
        return color.colorize("@%s{%s}" % (c, prefix))

    if args.list == "list":
        files = set(prefix[0] for prefix in tests)
        color_files = [colorize("B", file) for file in sorted(files)]
        colify(color_files)

    elif args.list == "long":
        for prefix, functions in sorted(tests.items()):
            path = colorize("*B", prefix) + "::"
            functions = [colorize("c", f) for f in sorted(functions)]
            color.cprint(path)
            colify(functions, indent=4)
            print()

    else:  # args.list == "names"
        all_functions = [
            colorize("*B", prefix) + "::" + colorize("c", f)
            for prefix, functions in sorted(tests.items())
            for f in sorted(functions)
        ]
        colify(all_functions)
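
For context, here is a minimal standalone sketch (not part of the patch; the sample collection output is hypothetical) of how the regex above carves `pytest --collect-only` output into nodes, with two spaces of indent per nesting level:

    import re

    # hypothetical sample of `pytest --collect-only` output
    sample = ("<Module 'test_config.py'>\n"
              "  <Function 'test_get_config'>\n"
              "  <Function 'test_write[defaults]'>")

    for line in sample.split('\n'):
        match = re.match(r"(\s*)<([^ ]*) '([^']*)'", line)
        if match:
            indent, nodetype, name = match.groups()
            # prints: depth, node type, node name
            print(len(indent) // 2, nodetype, name)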


def add_back_pytest_args(args, unknown_args):
    """Add parsed pytest args, unknown args, and remainder together.

    We add some basic pytest arguments to the Spack parser to ensure that
    they show up in the short help, so we have to reassemble things here.
    If no specs are listed, run tests for all packages in the current
    environment or all installed packages if there is no active environment.
    """
    result = args.parsed_args or []
    result += unknown_args or []
    result += args.pytest_args or []
    if args.expression:
        result += ["-k", args.expression]
    return result
    # cdash help option
    if args.help_cdash:
        parser = argparse.ArgumentParser(
            formatter_class=argparse.RawDescriptionHelpFormatter,
            epilog=textwrap.dedent('''\
environment variables:
  SPACK_CDASH_AUTH_TOKEN
                        authentication token to present to CDash
                        '''))
        arguments.add_cdash_args(parser, True)
        parser.print_help()
        return

    # set config option for fail-fast
    if args.fail_fast:
        spack.config.set('config:fail_fast', True, scope='command_line')

    # Get specs to test
    env = ev.get_env(args, 'test')
    hashes = env.all_hashes() if env else None

    specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]
    specs_to_test = []
    for spec in specs:
        matching = spack.store.db.query_local(spec, hashes=hashes)
        if spec and not matching:
            tty.warn("No installed packages match spec %s" % spec)
        specs_to_test.extend(matching)

    # test_stage_dir
    test_suite = spack.install_test.TestSuite(specs_to_test, args.alias)
    test_suite.ensure_stage()
    tty.msg("Spack test %s" % test_suite.name)

    # Set up reporter
    setattr(args, 'package', [s.format() for s in test_suite.specs])
    reporter = spack.report.collect_info(
        spack.package.PackageBase, 'do_test', args.log_format, args)
    if not reporter.filename:
        if args.log_file:
            if os.path.isabs(args.log_file):
                log_file = args.log_file
            else:
                log_dir = os.getcwd()
                log_file = os.path.join(log_dir, args.log_file)
        else:
            log_file = os.path.join(
                os.getcwd(),
                'test-%s' % test_suite.name)
        reporter.filename = log_file
    reporter.specs = specs_to_test

    with reporter('test', test_suite.stage):
        if args.smoke_test:
            test_suite(remove_directory=not args.keep_stage,
                       dirty=args.dirty,
                       fail_first=args.fail_first)
        else:
            raise NotImplementedError
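
As an aside, the log-file defaulting above reduces to a small path computation; a sketch under the same rules (the helper name `default_log_file` is hypothetical, and relative names resolve against the current directory):

    import os

    def default_log_file(log_file, suite_name):
        # mirrors the fallback logic above; 'test-<name>' is the default stem
        if log_file:
            if os.path.isabs(log_file):
                return log_file
            return os.path.join(os.getcwd(), log_file)
        return os.path.join(os.getcwd(), 'test-%s' % suite_name)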


def test(parser, args, unknown_args):
    if args.pytest_help:
        # make the pytest.main help output more accurate
        sys.argv[0] = 'spack test'
        return pytest.main(['-h'])
def test_list(args):
    """List all installed packages with available tests."""
    raise NotImplementedError

    # add back any parsed pytest args we need to pass to pytest
    pytest_args = add_back_pytest_args(args, unknown_args)

    # The default is to test the core of Spack. If the option `--extension`
    # has been used, then test that extension.
    pytest_root = spack.paths.spack_root
    if args.extension:
        target = args.extension
        extensions = spack.config.get('config:extensions')
        pytest_root = spack.extensions.path_for_extension(target, *extensions)
def test_find(args):  # TODO: merge with status (noargs)
    """Find tests that are running or have available results.

    # pytest.ini lives in the root of the spack repository.
    with working_dir(pytest_root):
        if args.list:
            do_list(args, pytest_args)
    Displays aliases for tests that have them, otherwise test suite content
    hashes."""
    test_suites = spack.install_test.get_all_test_suites()

    # Filter tests by filter argument
    if args.filter:
        def create_filter(f):
            raw = fnmatch.translate(f if '*' in f or '?' in f
                                    else '*' + f + '*')
            return re.compile(raw, flags=re.IGNORECASE)
        filters = [create_filter(f) for f in args.filter]

        def match(t, f):
            return f.match(t)
        test_suites = [t for t in test_suites
                       if any(match(t.alias, f) for f in filters) and
                       os.path.isdir(t.stage)]

    names = [t.name for t in test_suites]

    if names:
        # TODO: Make these specify results vs active
        msg = "Spack test results available for the following tests:\n"
        msg += "  %s\n" % ' '.join(names)
        msg += "  Run `spack test remove` to remove all tests"
        tty.msg(msg)
    else:
        msg = "No test results match the query\n"
        msg += "  Tests may have been removed using `spack test remove`"
        tty.msg(msg)
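
The corrected `create_filter` above turns bare words into substring matches and passes real glob patterns through unchanged; a standalone sketch (the sample names are hypothetical):

    import fnmatch
    import re

    def create_filter(f):
        # bare words become '*word*'; patterns with glob chars are kept as-is
        raw = fnmatch.translate(f if '*' in f or '?' in f else '*' + f + '*')
        return re.compile(raw, flags=re.IGNORECASE)

    assert create_filter('mpi').match('openmpi-4.0.2')
    assert create_filter('py-*').match('py-numpy')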


def test_status(args):
    """Get the current status for the specified Spack test suite(s)."""
    if args.names:
        test_suites = []
        for name in args.names:
            test_suite = spack.install_test.get_test_suite(name)
            if test_suite:
                test_suites.append(test_suite)
            else:
                tty.msg("No test suite %s found in test stage" % name)
    else:
        test_suites = spack.install_test.get_all_test_suites()
        if not test_suites:
            tty.msg("No test suites with status to report")

    for test_suite in test_suites:
        # TODO: Make this handle capability tests too
        # TODO: Make this handle tests running in another process
        tty.msg("Test suite %s completed" % test_suite.name)


def test_results(args):
    """Get the results from Spack test suite(s) (default all)."""
    if args.names:
        test_suites = []
        for name in args.names:
            test_suite = spack.install_test.get_test_suite(name)
            if test_suite:
                test_suites.append(test_suite)
            else:
                tty.msg("No test suite %s found in test stage" % name)
    else:
        test_suites = spack.install_test.get_all_test_suites()
        if not test_suites:
            tty.msg("No test suites with results to report")

    # TODO: Make this handle capability tests too
    # The results file may turn out to be a placeholder for future work
    for test_suite in test_suites:
        results_file = test_suite.results_file
        if os.path.exists(results_file):
            msg = "Results for test suite %s: \n" % test_suite.name
            with open(results_file, 'r') as f:
                lines = f.readlines()
                for line in lines:
                    msg += "  %s" % line
            tty.msg(msg)
        else:
            msg = "Test %s has no results.\n" % test_suite.name
            msg += "  Check if it is running with "
            msg += "`spack test status %s`" % test_suite.name
            tty.msg(msg)


def test_remove(args):
    """Remove results from Spack test suite(s) (default all).

    If no test suite is listed, remove results for all suites.

    Removed tests can no longer be accessed for results or status, and will not
    appear in `spack test list` results."""
    if args.names:
        test_suites = []
        for name in args.names:
            test_suite = spack.install_test.get_test_suite(name)
            if test_suite:
                test_suites.append(test_suite)
            else:
                tty.msg("No test suite %s found in test stage" % name)
    else:
        test_suites = spack.install_test.get_all_test_suites()

    if not test_suites:
        tty.msg("No test suites to remove")
        return

    if not args.yes_to_all:
        msg = 'The following test suites will be removed:\n\n'
        msg += '    ' + '   '.join(test.name for test in test_suites) + '\n'
        tty.msg(msg)
        answer = tty.get_yes_or_no('Do you want to proceed?', default=False)
        if not answer:
            tty.msg('Aborting removal of test suites')
            return

    return pytest.main(pytest_args)
    for test_suite in test_suites:
        shutil.rmtree(test_suite.stage)


def test(parser, args):
    globals()['test_%s' % args.test_command](args)
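
The `globals()` lookup above is a common argparse dispatch idiom: the subcommand name selects the matching `test_*` function. A self-contained sketch of the same pattern (the toy functions are illustrative only):

    def test_list(args):
        print('listing...')

    def test_status(args):
        print('status...')

    def dispatch(command, args):
        # resolve 'list' -> test_list, 'status' -> test_status, etc.
        return globals()['test_%s' % command](args)

    dispatch('status', None)  # prints 'status...'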

16
lib/spack/spack/cmd/test_env.py
Normal file
@@ -0,0 +1,16 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.cmd.common.env_utility as env_utility

description = "run a command in a spec's test environment, " \
              "or dump its environment to screen or file"
section = "administration"
level = "long"

setup_parser = env_utility.setup_parser


def test_env(parser, args):
    env_utility.emulate_env_utility('test-env', 'test', args)

169
lib/spack/spack/cmd/unit_test.py
Normal file
@@ -0,0 +1,169 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function
from __future__ import division

import collections
import sys
import re
import argparse
import pytest
from six import StringIO

import llnl.util.tty.color as color
from llnl.util.filesystem import working_dir
from llnl.util.tty.colify import colify

import spack.paths

description = "run spack's unit tests (wrapper around pytest)"
section = "developer"
level = "long"


def setup_parser(subparser):
    subparser.add_argument(
        '-H', '--pytest-help', action='store_true', default=False,
        help="show full pytest help, with advanced options")

    # extra spack arguments to list tests
    list_group = subparser.add_argument_group("listing tests")
    list_mutex = list_group.add_mutually_exclusive_group()
    list_mutex.add_argument(
        '-l', '--list', action='store_const', default=None,
        dest='list', const='list', help="list test filenames")
    list_mutex.add_argument(
        '-L', '--list-long', action='store_const', default=None,
        dest='list', const='long', help="list all test functions")
    list_mutex.add_argument(
        '-N', '--list-names', action='store_const', default=None,
        dest='list', const='names', help="list full names of all tests")

    # use tests for extension
    subparser.add_argument(
        '--extension', default=None,
        help="run test for a given spack extension")

    # spell out some common pytest arguments, so they'll show up in help
    pytest_group = subparser.add_argument_group(
        "common pytest arguments (spack unit-test --pytest-help for more)")
    pytest_group.add_argument(
        "-s", action='append_const', dest='parsed_args', const='-s',
        help="print output while tests run (disable capture)")
    pytest_group.add_argument(
        "-k", action='store', metavar="EXPRESSION", dest='expression',
        help="filter tests by keyword (can also use w/list options)")
    pytest_group.add_argument(
        "--showlocals", action='append_const', dest='parsed_args',
        const='--showlocals', help="show local variable values in tracebacks")

    # remainder is just passed to pytest
    subparser.add_argument(
        'pytest_args', nargs=argparse.REMAINDER, help="arguments for pytest")


def do_list(args, extra_args):
    """Print a more readable list of tests than what pytest offers."""
    # Run test collection and get the tree out.
    old_output = sys.stdout
    try:
        sys.stdout = output = StringIO()
        pytest.main(['--collect-only'] + extra_args)
    finally:
        sys.stdout = old_output

    lines = output.getvalue().split('\n')
    tests = collections.defaultdict(lambda: set())
    prefix = []

    # collect tests into sections
    for line in lines:
        match = re.match(r"(\s*)<([^ ]*) '([^']*)'", line)
        if not match:
            continue
        indent, nodetype, name = match.groups()

        # strip parametrized tests
        if "[" in name:
            name = name[:name.index("[")]

        depth = len(indent) // 2

        if nodetype.endswith("Function"):
            key = tuple(prefix)
            tests[key].add(name)
        else:
            prefix = prefix[:depth]
            prefix.append(name)

    def colorize(c, prefix):
        if isinstance(prefix, tuple):
            return "::".join(
                color.colorize("@%s{%s}" % (c, p))
                for p in prefix if p != "()"
            )
        return color.colorize("@%s{%s}" % (c, prefix))

    if args.list == "list":
        files = set(prefix[0] for prefix in tests)
        color_files = [colorize("B", file) for file in sorted(files)]
        colify(color_files)

    elif args.list == "long":
        for prefix, functions in sorted(tests.items()):
            path = colorize("*B", prefix) + "::"
            functions = [colorize("c", f) for f in sorted(functions)]
            color.cprint(path)
            colify(functions, indent=4)
            print()

    else:  # args.list == "names"
        all_functions = [
            colorize("*B", prefix) + "::" + colorize("c", f)
            for prefix, functions in sorted(tests.items())
            for f in sorted(functions)
        ]
        colify(all_functions)


def add_back_pytest_args(args, unknown_args):
    """Add parsed pytest args, unknown args, and remainder together.

    We add some basic pytest arguments to the Spack parser to ensure that
    they show up in the short help, so we have to reassemble things here.
    """
    result = args.parsed_args or []
    result += unknown_args or []
    result += args.pytest_args or []
    if args.expression:
        result += ["-k", args.expression]
    return result
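
To see how the pieces recombine, a hypothetical call (the Namespace fields mirror the parser destinations defined above, and the values are made up):

    import argparse

    args = argparse.Namespace(
        parsed_args=['-s', '--showlocals'],  # from the append_const options
        expression='config and not db',      # from -k
        pytest_args=['lib/spack/spack/test/config.py'])

    result = (args.parsed_args or []) + (args.pytest_args or [])
    if args.expression:
        result += ['-k', args.expression]

    print(result)
    # ['-s', '--showlocals', 'lib/spack/spack/test/config.py',
    #  '-k', 'config and not db']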


def unit_test(parser, args, unknown_args):
    if args.pytest_help:
        # make the pytest.main help output more accurate
        sys.argv[0] = 'spack test'
        return pytest.main(['-h'])

    # add back any parsed pytest args we need to pass to pytest
    pytest_args = add_back_pytest_args(args, unknown_args)

    # The default is to test the core of Spack. If the option `--extension`
    # has been used, then test that extension.
    pytest_root = spack.paths.spack_root
    if args.extension:
        target = args.extension
        extensions = spack.config.get('config:extensions')
        pytest_root = spack.extensions.path_for_extension(target, *extensions)

    # pytest.ini lives in the root of the spack repository.
    with working_dir(pytest_root):
        if args.list:
            do_list(args, pytest_args)
            return

        return pytest.main(pytest_args)
@@ -18,6 +18,7 @@

import spack.error
import spack.spec
import spack.version
import spack.architecture
import spack.util.executable
import spack.util.module_cmd
@@ -28,7 +29,7 @@


@llnl.util.lang.memoized
def get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
    """Invokes the compiler at a given path passing a single
    version argument and returns the output.

@@ -42,6 +43,18 @@ def get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
    return output


def get_compiler_version_output(compiler_path, *args, **kwargs):
    """Wrapper for _get_compiler_version_output()."""
    # This ensures that we memoize compiler output by *absolute path*,
    # not just executable name. If we don't do this, and the path changes
    # (e.g., during testing), we can get incorrect results.
    if not os.path.isabs(compiler_path):
        compiler_path = spack.util.executable.which_string(
            compiler_path, required=True)

    return _get_compiler_version_output(compiler_path, *args, **kwargs)
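
The wrapper keeps the memoization cache keyed by absolute path. The same idea expressed with only the standard library (a sketch; `shutil.which` stands in for Spack's `which_string` helper, and the cached function body is a placeholder):

    import functools
    import os
    import shutil

    @functools.lru_cache(maxsize=None)
    def _version_output(compiler_path, version_arg):
        # expensive subprocess call in real life; cached per absolute path
        return '%s %s' % (compiler_path, version_arg)

    def version_output(compiler_path, version_arg):
        if not os.path.isabs(compiler_path):
            # normalize to an absolute path before hitting the cache
            compiler_path = shutil.which(compiler_path)
        return _version_output(compiler_path, version_arg)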


def tokenize_flags(flags_str):
    """Given a compiler flag specification as a string, this returns a list
    where the entries are the flags. For compiler options which set values
@@ -189,7 +202,7 @@ class Compiler(object):
    fc_names = []

    # Optional prefix regexes for searching for this type of compiler.
    # Prefixes are sometimes used for toolchains, e.g. 'powerpc-bgq-linux-'
    # Prefixes are sometimes used for toolchains
    prefixes = []

    # Optional suffix regexes for searching for this type of compiler.
@@ -266,7 +279,8 @@ def __init__(self, cspec, operating_system, target,
        self.target = target
        self.modules = modules or []
        self.alias = alias
        self.extra_rpaths = extra_rpaths
        self.environment = environment or {}
        self.extra_rpaths = extra_rpaths or []
        self.enable_implicit_rpaths = enable_implicit_rpaths

        self.cc = paths[0]
@@ -280,9 +294,6 @@ def __init__(self, cspec, operating_system, target,
        else:
            self.fc = paths[3]

        self.environment = environment
        self.extra_rpaths = extra_rpaths or []

        # Unfortunately have to make sure these params are accepted
        # in the same order they are returned by sorted(flags)
        # in compilers/__init__.py
@@ -292,6 +303,10 @@ def __init__(self, cspec, operating_system, target,
            if value is not None:
                self.flags[flag] = tokenize_flags(value)

        # caching value for compiler reported version
        # used for version checks for API, e.g. C++11 flag
        self._real_version = None

    def verify_executables(self):
        """Raise an error if any of the compiler executables is not valid.

@@ -321,6 +336,20 @@ def accessible_exe(exe):
    def version(self):
        return self.spec.version

    @property
    def real_version(self):
        """Executable reported compiler version used for API-determinations

        E.g. C++11 flag checks.
        """
        if not self._real_version:
            try:
                self._real_version = spack.version.Version(
                    self.get_real_version())
            except spack.util.executable.ProcessError:
                self._real_version = self.version
        return self._real_version
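
In other words, the property computes the executable-reported version once, caches it, and falls back to the spec version when the compiler cannot be run. The shape of that pattern, reduced to a toy class (names and the OSError stand-in are assumptions, not Spack API):

    class VersionedTool(object):
        # minimal sketch of the caching-with-fallback pattern used above
        def __init__(self, declared_version):
            self._real_version = None
            self.version = declared_version

        def get_real_version(self):
            raise OSError('tool not runnable')  # stand-in for ProcessError

        @property
        def real_version(self):
            if not self._real_version:
                try:
                    self._real_version = self.get_real_version()
                except OSError:
                    self._real_version = self.version  # fall back
            return self._real_version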

    def implicit_rpaths(self):
        if self.enable_implicit_rpaths is False:
            return []

@@ -576,9 +576,7 @@ def _default(search_paths):
        )
        command_arguments.append(detect_version_args)

    # Reverse it here so that the dict creation (last insert wins)
    # does not spoil the intended precedence.
    return reversed(command_arguments)
    return command_arguments

    fn = getattr(
        operating_system, 'arguments_to_detect_version_fn', _default
@@ -650,23 +648,18 @@ def make_compiler_list(detected_versions):
    Returns:
        list of Compiler objects
    """
    # We don't sort on the path of the compiler
    sort_fn = lambda x: (x.id, x.variation, x.language)
    compilers_s = sorted(detected_versions, key=sort_fn)
    group_fn = lambda x: (x.id, x.variation, x.language)
    sorted_compilers = sorted(detected_versions, key=group_fn)

    # Gather items in a dictionary by the id, name variation and language
    compilers_d = {}
    for sort_key, group in itertools.groupby(compilers_s, key=sort_fn):
    for sort_key, group in itertools.groupby(sorted_compilers, key=group_fn):
        compiler_id, name_variation, language = sort_key
        by_compiler_id = compilers_d.setdefault(compiler_id, {})
        by_name_variation = by_compiler_id.setdefault(name_variation, {})
        by_name_variation[language] = next(x.path for x in group)

    # For each unique compiler id select the name variation with most entries
    # i.e. the one that supports most languages
    compilers = []

    def _default(cmp_id, paths):
    def _default_make_compilers(cmp_id, paths):
        operating_system, compiler_name, version = cmp_id
        compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
        spec = spack.spec.CompilerSpec(compiler_cls.name, version)
@@ -677,16 +670,38 @@ def _default(cmp_id, paths):
        )
        return [compiler]

    for compiler_id, by_compiler_id in compilers_d.items():
        _, selected_name_variation = max(
            (len(by_compiler_id[variation]), variation)
            for variation in by_compiler_id
        )
    # For compilers with the same compiler id:
    #
    # - Prefer with C compiler to without
    # - Prefer with C++ compiler to without
    # - Prefer no variations to variations (e.g., clang to clang-gpu)
    #
    sort_fn = lambda variation: (
        'cc' not in by_compiler_id[variation],   # None last
        'cxx' not in by_compiler_id[variation],  # None last
        getattr(variation, 'prefix', None),
        getattr(variation, 'suffix', None),
    )

    compilers = []
    for compiler_id, by_compiler_id in compilers_d.items():
        ordered = sorted(by_compiler_id, key=sort_fn)
        selected_variation = ordered[0]
        selected = by_compiler_id[selected_variation]

        # fill any missing parts from subsequent entries
        for lang in ['cxx', 'f77', 'fc']:
            if lang not in selected:
                next_lang = next((
                    by_compiler_id[v][lang] for v in ordered
                    if lang in by_compiler_id[v]), None)
                if next_lang:
                    selected[lang] = next_lang

        # Add it to the list of compilers
        selected = by_compiler_id[selected_name_variation]
        operating_system, _, _ = compiler_id
        make_compilers = getattr(operating_system, 'make_compilers', _default)
        make_compilers = getattr(
            operating_system, 'make_compilers', _default_make_compilers)

        compilers.extend(make_compilers(compiler_id, selected))

    return compilers
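
The preference encoded in `sort_fn` is plain lexicographic tuple ordering: False sorts before True, so variations that do have a C or C++ compiler come first. A toy illustration (in the real code the variation keys are namedtuples with prefix/suffix fields; plain strings are used here for brevity):

    by_compiler_id = {
        'clang-gpu': {'cxx': '/usr/bin/clang++-gpu'},
        'clang': {'cc': '/usr/bin/clang', 'cxx': '/usr/bin/clang++'},
    }

    sort_fn = lambda variation: (
        'cc' not in by_compiler_id[variation],   # has cc -> False -> first
        'cxx' not in by_compiler_id[variation],
    )

    print(sorted(by_compiler_id, key=sort_fn))  # ['clang', 'clang-gpu']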

@@ -23,7 +23,12 @@ def extract_version_from_output(cls, output):
        ver = 'unknown'
        match = re.search(
            # Apple's LLVM compiler has its own versions, so suffix them.
            r'^Apple (?:LLVM|clang) version ([^ )]+)', output
            r'^Apple (?:LLVM|clang) version ([^ )]+)',
            output,
            # Multi-line, since 'Apple clang' may not be on the first line
            # in particular, when run as gcc, it seems to output
            # "Configured with: --prefix=..." as the first line
            re.M,
        )
        if match:
            ver = match.group(match.lastindex)
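
The `re.M` flag is what lets `^Apple clang` match after a leading "Configured with:" line; for instance (sample output abbreviated):

    import re

    output = ("Configured with: --prefix=/Applications/Xcode.app/...\n"
              "Apple clang version 11.0.0 (clang-1100.0.33.17)")

    pattern = r'^Apple (?:LLVM|clang) version ([^ )]+)'
    print(re.search(pattern, output))                 # None without re.M
    print(re.search(pattern, output, re.M).group(1))  # '11.0.0'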
@@ -33,7 +38,7 @@ def extract_version_from_output(cls, output):
    def cxx11_flag(self):
        # Adapted from CMake's AppleClang-CXX rules
        # Spack's AppleClang detection only valid from Xcode >= 4.6
        if self.version < spack.version.ver('4.0.0'):
        if self.real_version < spack.version.ver('4.0.0'):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++11 standard", "cxx11_flag", "Xcode < 4.0.0"
            )
@@ -42,11 +47,11 @@ def cxx11_flag(self):
    @property
    def cxx14_flag(self):
        # Adapted from CMake's rules for AppleClang
        if self.version < spack.version.ver('5.1.0'):
        if self.real_version < spack.version.ver('5.1.0'):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++14 standard", "cxx14_flag", "Xcode < 5.1.0"
            )
        elif self.version < spack.version.ver('6.1.0'):
        elif self.real_version < spack.version.ver('6.1.0'):
            return "-std=c++1y"

        return "-std=c++14"
@@ -54,7 +59,7 @@ def cxx14_flag(self):
    @property
    def cxx17_flag(self):
        # Adapted from CMake's rules for AppleClang
        if self.version < spack.version.ver('6.1.0'):
        if self.real_version < spack.version.ver('6.1.0'):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++17 standard", "cxx17_flag", "Xcode < 6.1.0"
            )

@@ -34,7 +34,7 @@ class Cce(Compiler):

    @property
    def is_clang_based(self):
        version = self.version
        version = self._real_version or self.version
        return version >= ver('9.0') and 'classic' not in str(version)

    @property
@@ -69,9 +69,9 @@ def cxx11_flag(self):
    def c99_flag(self):
        if self.is_clang_based:
            return '-std=c99'
        elif self.version >= ver('8.4'):
        elif self.real_version >= ver('8.4'):
            return '-h std=c99,noconform,gnu'
        elif self.version >= ver('8.1'):
        elif self.real_version >= ver('8.1'):
            return '-h c99,noconform,gnu'
        raise UnsupportedCompilerFlag(self,
                                      'the C99 standard',
@@ -82,7 +82,7 @@ def c99_flag(self):
    def c11_flag(self):
        if self.is_clang_based:
            return '-std=c11'
        elif self.version >= ver('8.5'):
        elif self.real_version >= ver('8.5'):
            return '-h std=c11,noconform,gnu'
        raise UnsupportedCompilerFlag(self,
                                      'the C11 standard',

@@ -90,7 +90,7 @@ def verbose_flag(self):

    @property
    def cxx11_flag(self):
        if self.version < ver('3.3'):
        if self.real_version < ver('3.3'):
            raise UnsupportedCompilerFlag(
                self, "the C++11 standard", "cxx11_flag", "< 3.3"
            )
@@ -98,22 +98,22 @@ def cxx11_flag(self):

    @property
    def cxx14_flag(self):
        if self.version < ver('3.4'):
        if self.real_version < ver('3.4'):
            raise UnsupportedCompilerFlag(
                self, "the C++14 standard", "cxx14_flag", "< 3.5"
            )
        elif self.version < ver('3.5'):
        elif self.real_version < ver('3.5'):
            return "-std=c++1y"

        return "-std=c++14"

    @property
    def cxx17_flag(self):
        if self.version < ver('3.5'):
        if self.real_version < ver('3.5'):
            raise UnsupportedCompilerFlag(
                self, "the C++17 standard", "cxx17_flag", "< 3.5"
            )
        elif self.version < ver('5.0'):
        elif self.real_version < ver('5.0'):
            return "-std=c++1z"

        return "-std=c++17"
@@ -124,7 +124,7 @@ def c99_flag(self):

    @property
    def c11_flag(self):
        if self.version < ver('6.1.0'):
        if self.real_version < ver('6.1.0'):
            raise UnsupportedCompilerFlag(self,
                                          "the C11 standard",
                                          "c11_flag",

@@ -26,7 +26,7 @@ class Fj(spack.compiler.Compiler):
               'fc': 'fj/frt'}

    version_argument = '--version'
    version_regex = r'\((?:FCC|FRT)\) ([\d.]+)'
    version_regex = r'\((?:FCC|FRT)\) ([a-z\d.]+)'

    required_libs = ['libfj90i', 'libfj90f', 'libfjsrcinfo']


@@ -5,13 +5,13 @@

import re

import spack.compilers.clang
import spack.compiler
import spack.compilers.apple_clang as apple_clang

from spack.compiler import Compiler, UnsupportedCompilerFlag
from spack.version import ver


class Gcc(Compiler):
class Gcc(spack.compiler.Compiler):
    # Subclasses use possible names of C compiler
    cc_names = ['gcc']

@@ -56,65 +56,55 @@ def openmp_flag(self):

    @property
    def cxx98_flag(self):
        if self.version < ver('6.0'):
        if self.real_version < ver('6.0'):
            return ""
        else:
            return "-std=c++98"

    @property
    def cxx11_flag(self):
        if self.version < ver('4.3'):
            raise UnsupportedCompilerFlag(self,
                                          "the C++11 standard",
                                          "cxx11_flag",
                                          " < 4.3")
        elif self.version < ver('4.7'):
        if self.real_version < ver('4.3'):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++11 standard", "cxx11_flag", " < 4.3")
        elif self.real_version < ver('4.7'):
            return "-std=c++0x"
        else:
            return "-std=c++11"

    @property
    def cxx14_flag(self):
        if self.version < ver('4.8'):
            raise UnsupportedCompilerFlag(self,
                                          "the C++14 standard",
                                          "cxx14_flag",
                                          "< 4.8")
        elif self.version < ver('4.9'):
        if self.real_version < ver('4.8'):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++14 standard", "cxx14_flag", "< 4.8")
        elif self.real_version < ver('4.9'):
            return "-std=c++1y"
        elif self.version < ver('6.0'):
        elif self.real_version < ver('6.0'):
            return "-std=c++14"
        else:
            return ""

    @property
    def cxx17_flag(self):
        if self.version < ver('5.0'):
            raise UnsupportedCompilerFlag(self,
                                          "the C++17 standard",
                                          "cxx17_flag",
                                          "< 5.0")
        elif self.version < ver('6.0'):
        if self.real_version < ver('5.0'):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++17 standard", "cxx17_flag", "< 5.0")
        elif self.real_version < ver('6.0'):
            return "-std=c++1z"
        else:
            return "-std=c++17"

    @property
    def c99_flag(self):
        if self.version < ver('4.5'):
            raise UnsupportedCompilerFlag(self,
                                          "the C99 standard",
                                          "c99_flag",
                                          "< 4.5")
        if self.real_version < ver('4.5'):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C99 standard", "c99_flag", "< 4.5")
        return "-std=c99"

    @property
    def c11_flag(self):
        if self.version < ver('4.7'):
            raise UnsupportedCompilerFlag(self,
                                          "the C11 standard",
                                          "c11_flag",
                                          "< 4.7")
        if self.real_version < ver('4.7'):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C11 standard", "c11_flag", "< 4.7")
        return "-std=c11"

    @property
@@ -152,10 +142,10 @@ def default_version(cls, cc):

        7.2.0
    """
    # Skip any gcc versions that are actually clang, like Apple's gcc.
    # Returning "unknown" makes them not detected by default.
    # Users can add these manually to compilers.yaml at their own risk.
    if spack.compilers.clang.Clang.default_version(cc) != 'unknown':
    # Apple's gcc is actually apple clang, so skip it. Returning
    # "unknown" ensures this compiler is not detected by default.
    # Users can add it manually to compilers.yaml at their own risk.
    if apple_clang.AppleClang.default_version(cc) != 'unknown':
        return 'unknown'

    version = super(Gcc, cls).default_version(cc)

@@ -48,20 +48,20 @@ def opt_flags(self):

    @property
    def openmp_flag(self):
        if self.version < ver('16.0'):
        if self.real_version < ver('16.0'):
            return "-openmp"
        else:
            return "-qopenmp"

    @property
    def cxx11_flag(self):
        if self.version < ver('11.1'):
        if self.real_version < ver('11.1'):
            raise UnsupportedCompilerFlag(self,
                                          "the C++11 standard",
                                          "cxx11_flag",
                                          "< 11.1")

        elif self.version < ver('13'):
        elif self.real_version < ver('13'):
            return "-std=c++0x"
        else:
            return "-std=c++11"
@@ -69,19 +69,19 @@ def cxx11_flag(self):
    @property
    def cxx14_flag(self):
        # Adapted from CMake's Intel-CXX rules.
        if self.version < ver('15'):
        if self.real_version < ver('15'):
            raise UnsupportedCompilerFlag(self,
                                          "the C++14 standard",
                                          "cxx14_flag",
                                          "< 15")
        elif self.version < ver('15.0.2'):
        elif self.real_version < ver('15.0.2'):
            return "-std=c++1y"
        else:
            return "-std=c++14"

    @property
    def c99_flag(self):
        if self.version < ver('12'):
        if self.real_version < ver('12'):
            raise UnsupportedCompilerFlag(self,
                                          "the C99 standard",
                                          "c99_flag",
@@ -91,7 +91,7 @@ def c99_flag(self):

    @property
    def c11_flag(self):
        if self.version < ver('16'):
        if self.real_version < ver('16'):
            raise UnsupportedCompilerFlag(self,
                                          "the C11 standard",
                                          "c11_flag",

@@ -73,7 +73,7 @@ def fc_pic_flag(self):

    @property
    def c99_flag(self):
        if self.version >= ver('12.10'):
        if self.real_version >= ver('12.10'):
            return '-c99'
        raise UnsupportedCompilerFlag(self,
                                      'the C99 standard',
@@ -82,7 +82,7 @@ def c99_flag(self):

    @property
    def c11_flag(self):
        if self.version >= ver('15.3'):
        if self.real_version >= ver('15.3'):
            return '-c11'
        raise UnsupportedCompilerFlag(self,
                                      'the C11 standard',

@@ -47,7 +47,7 @@ def openmp_flag(self):

    @property
    def cxx11_flag(self):
        if self.version < ver('13.1'):
        if self.real_version < ver('13.1'):
            raise UnsupportedCompilerFlag(self,
                                          "the C++11 standard",
                                          "cxx11_flag",
@@ -57,9 +57,9 @@ def cxx11_flag(self):

    @property
    def c99_flag(self):
        if self.version >= ver('13.1.1'):
        if self.real_version >= ver('13.1.1'):
            return '-std=gnu99'
        if self.version >= ver('10.1'):
        if self.real_version >= ver('10.1'):
            return '-qlanglvl=extc99'
        raise UnsupportedCompilerFlag(self,
                                      'the C99 standard',
@@ -68,9 +68,9 @@ def c99_flag(self):

    @property
    def c11_flag(self):
        if self.version >= ver('13.1.2'):
        if self.real_version >= ver('13.1.2'):
            return '-std=gnu11'
        if self.version >= ver('12.1'):
        if self.real_version >= ver('12.1'):
            return '-qlanglvl=extc1x'
        raise UnsupportedCompilerFlag(self,
                                      'the C11 standard',

@@ -365,6 +365,7 @@ def _proper_compiler_style(cspec, aspec):
    compilers = spack.compilers.compilers_for_spec(
        cspec, arch_spec=aspec
    )

    # If the spec passed as argument is concrete we want to check
    # the versions match exactly
    if (cspec.concrete and compilers and
@@ -454,7 +455,7 @@ def concretize_compiler_flags(self, spec):
            # continue. `return True` here to force concretization to keep
            # running.
            return True

        raise Exception
        compiler_match = lambda other: (
            spec.compiler == other.compiler and
            spec.architecture == other.architecture)
@@ -30,6 +30,7 @@

"""

import collections
import copy
import os
import re
@@ -140,6 +141,10 @@ def __init__(self, name, path):
        self.path = path  # path to directory containing configs.
        self.sections = syaml.syaml_dict()  # sections read from config files.

    @property
    def is_platform_dependent(self):
        return '/' in self.name

    def get_section_filename(self, section):
        _validate_section_name(section)
        return os.path.join(self.path, "%s.yaml" % section)
@@ -183,18 +188,27 @@ def __init__(self, name, path, schema, yaml_path=None):

        Arguments:
            schema (dict): jsonschema for the file to read
            yaml_path (list): list of dict keys in the schema where
                config data can be found;
            yaml_path (list): path in the schema where config data can be
                found.

                If the schema accepts the following yaml data, the yaml_path
                would be ['outer', 'inner']

                Elements of ``yaml_path`` can be tuples or lists to represent
                an "or" of keys (e.g. "env" or "spack" is ``('env', 'spack')``)

                .. code-block:: yaml

                   outer:
                     inner:
                       config:
                         install_tree: $spack/opt/spack
        """
        super(SingleFileScope, self).__init__(name, path)
        self._raw_data = None
        self.schema = schema
        self.yaml_path = yaml_path or []

    @property
    def is_platform_dependent(self):
        return False

    def get_section_filename(self, section):
        return self.path
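
Concretely, the yaml_path traversal just walks nested keys until the config sections are reached; a sketch using the docstring's own example data:

    data = {'outer': {'inner': {'config': {'install_tree': '$spack/opt/spack'}}}}
    yaml_path = ['outer', 'inner']

    section_data = data
    for key in yaml_path:
        section_data = section_data[key]

    print(section_data)  # {'config': {'install_tree': '$spack/opt/spack'}}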

@@ -229,32 +243,54 @@ def get_section(self, section):
            if self._raw_data is None:
                return None

            section_data = self._raw_data
            for key in self.yaml_path:
                if self._raw_data is None:
                if section_data is None:
                    return None
                section_data = section_data[key]

                # support tuples as "or" in the yaml path
                if isinstance(key, (list, tuple)):
                    key = first_existing(self._raw_data, key)

                self._raw_data = self._raw_data[key]

            for section_key, data in self._raw_data.items():
            for section_key, data in section_data.items():
                self.sections[section_key] = {section_key: data}

        return self.sections.get(section, None)

    def write_section(self, section):
        validate(self.sections, self.schema)
        data_to_write = self._raw_data

        # If there is no existing data, this section SingleFileScope has never
        # been written to disk. We need to construct the portion of the data
        # from the root of self._raw_data to the level at which the config
        # sections are defined. That requires creating keys for every entry in
        # self.yaml_path
        if not data_to_write:
            data_to_write = {}
            # reverse because we construct it from the inside out
            for key in reversed(self.yaml_path):
                data_to_write = {key: data_to_write}

        # data_update_pointer is a pointer to the part of data_to_write
        # that we are currently updating.
        # We start by traversing into the data to the point at which the
        # config sections are defined. This means popping the keys from
        # self.yaml_path
        data_update_pointer = data_to_write
        for key in self.yaml_path:
            data_update_pointer = data_update_pointer[key]

        # For each section, update the data at the level of our pointer
        # with the data from the section
        for key, data in self.sections.items():
            data_update_pointer[key] = data[key]

        validate(data_to_write, self.schema)
        try:
            parent = os.path.dirname(self.path)
            mkdirp(parent)

            tmp = os.path.join(parent, '.%s.tmp' % self.path)
            tmp = os.path.join(parent, '.%s.tmp' % os.path.basename(self.path))
            with open(tmp, 'w') as f:
                syaml.dump_config(self.sections, stream=f,
                syaml.dump_config(data_to_write, stream=f,
                                  default_flow_style=False)
            os.path.move(tmp, self.path)
            os.rename(tmp, self.path)
        except (yaml.YAMLError, IOError) as e:
            raise ConfigFileError(
                "Error writing to config file: '%s'" % str(e))
@@ -352,6 +388,7 @@ def __init__(self, *scopes):
        self.scopes = OrderedDict()
        for scope in scopes:
            self.push_scope(scope)
        self.format_updates = collections.defaultdict(list)

    def push_scope(self, scope):
        """Add a higher precedence scope to the Configuration."""
@@ -378,7 +415,9 @@ def remove_scope(self, scope_name):
    @property
    def file_scopes(self):
        """List of writable scopes with an associated file."""
        return [s for s in self.scopes.values() if type(s) == ConfigScope]
        return [s for s in self.scopes.values()
                if (type(s) == ConfigScope
                    or type(s) == SingleFileScope)]

    def highest_precedence_scope(self):
        """Non-internal scope with highest precedence."""
@@ -390,7 +429,7 @@ def highest_precedence_non_platform_scope(self):

        Platform-specific scopes are of the form scope/platform"""
        generator = reversed(self.file_scopes)
        highest = next(generator, None)
        while highest and '/' in highest.name:
        while highest and highest.is_platform_dependent:
            highest = next(generator, None)
        return highest

@@ -440,7 +479,7 @@ def clear_caches(self):
        for scope in self.scopes.values():
            scope.clear()

    def update_config(self, section, update_data, scope=None):
    def update_config(self, section, update_data, scope=None, force=False):
        """Update the configuration file for a particular scope.

        Overwrites contents of a section in a scope with update_data,
@@ -449,7 +488,26 @@ def update_config(self, section, update_data, scope=None):
        update_data should have the top-level section name stripped off
        (it will be re-added). Data itself can be a list, dict, or any
        other yaml-ish structure.

        Configuration scopes that are still written in an old schema
        format will fail to update unless ``force`` is True.

        Args:
            section (str): section of the configuration to be updated
            update_data (dict): data to be used for the update
            scope (str): scope to be updated
            force (str): force the update
        """
        if self.format_updates.get(section) and not force:
            msg = ('The "{0}" section of the configuration needs to be written'
                   ' to disk, but is currently using a deprecated format. '
                   'Please update it using:\n\n'
                   '\tspack config [--scope=<scope>] update {0}\n\n'
                   'Note that previous versions of Spack will not be able to '
                   'use the updated configuration.')
            msg = msg.format(section)
            raise RuntimeError(msg)

        _validate_section_name(section)  # validate section name
        scope = self._validate_scope(scope)  # get ConfigScope object

@@ -514,6 +572,15 @@ def get_config(self, section, scope=None):
            if section not in data:
                continue

            # We might be reading configuration files in an old format,
            # thus read data and update it in memory if need be.
            changed = _update_in_memory(data, section)
            if changed:
                self.format_updates[section].append(scope)
                msg = ('OUTDATED CONFIGURATION FILE '
                       '[section={0}, scope={1}, dir={2}]')
                tty.debug(msg.format(section, scope.name, scope.path))

            merged_section = merge_yaml(merged_section, data)

        # no config files -- empty config.
@@ -723,7 +790,7 @@ def get(path, default=None, scope=None):


def set(path, value, scope=None):
    """Convenience function for getting single values in config files.
    """Convenience function for setting single values in config files.

    Accepts the path syntax described in ``get()``.
    """
@@ -999,6 +1066,41 @@ def default_list_scope():
    return None


def _update_in_memory(data, section):
    """Update the format of the configuration data in memory.

    This function assumes the section is valid (i.e. validation
    is responsibility of the caller)

    Args:
        data (dict): configuration data
        section (str): section of the configuration to update

    Returns:
        True if the data was changed, False otherwise
    """
    update_fn = ensure_latest_format_fn(section)
    changed = update_fn(data[section])
    return changed


def ensure_latest_format_fn(section):
    """Return a function that takes as input a dictionary read from
    a configuration file and update it to the latest format.

    The function returns True if there was any update, False otherwise.

    Args:
        section (str): section of the configuration e.g. "packages",
            "config", etc.
    """
    # The line below is based on the fact that every module we need
    # is already imported at the top level
    section_module = getattr(spack.schema, section)
    update_fn = getattr(section_module, 'update', lambda x: False)
    return update_fn


class ConfigError(SpackError):
    """Superclass for all Spack config related errors."""


@@ -13,7 +13,10 @@
            "0.14.2": "0.14.2",
            "0.15": "0.15",
            "0.15.0": "0.15.0",
            "0.15.1": "0.15.1"
            "0.15.1": "0.15.1",
            "0.15.2": "0.15.2",
            "0.15.3": "0.15.3",
            "0.15.4": "0.15.4"
        }
    },
    "ubuntu:16.04": {
@@ -30,7 +33,10 @@
            "0.14.2": "0.14.2",
            "0.15": "0.15",
            "0.15.0": "0.15.0",
            "0.15.1": "0.15.1"
            "0.15.1": "0.15.1",
            "0.15.2": "0.15.2",
            "0.15.3": "0.15.3",
            "0.15.4": "0.15.4"
        }
    },
    "centos:7": {
@@ -47,7 +53,10 @@
            "0.14.2": "0.14.2",
            "0.15": "0.15",
            "0.15.0": "0.15.0",
            "0.15.1": "0.15.1"
            "0.15.1": "0.15.1",
            "0.15.2": "0.15.2",
            "0.15.3": "0.15.3",
            "0.15.4": "0.15.4"
        }
    },
    "centos:6": {
@@ -64,7 +73,10 @@
            "0.14.2": "0.14.2",
            "0.15": "0.15",
            "0.15.0": "0.15.0",
            "0.15.1": "0.15.1"
            "0.15.1": "0.15.1",
            "0.15.2": "0.15.2",
            "0.15.3": "0.15.3",
            "0.15.4": "0.15.4"
        }
    }
}

@@ -17,6 +17,26 @@
default_deptype = ('build', 'link')


def deptype_chars(*type_tuples):
    """Create a string representing deptypes for many dependencies.

    The string will be some subset of 'blrt', like 'bl  ', 'b t', or
    ' lr ' where each letter in 'blrt' stands for 'build', 'link',
    'run', and 'test' (the dependency types).

    For a single dependency, this just indicates that the dependency has
    the indicated deptypes. For a list of dependencies, this shows
    whether ANY dependency in the list has the deptypes (so the deptypes
    are merged).
    """
    types = set()
    for t in type_tuples:
        if t:
            types.update(t)

    return ''.join(t[0] if t in types else ' ' for t in all_deptypes)


def canonical_deptype(deptype):
    """Convert deptype to a canonical sorted tuple, or raise ValueError.

@@ -108,3 +128,8 @@ def merge(self, other):
                self.patches[cond].extend(other.patches[cond])
            else:
                self.patches[cond] = other.patches[cond]

    def __repr__(self):
        types = deptype_chars(self.type)
        return '<Dependency: %s -> %s [%s]>' % (
            self.pkg.name, self.spec, types)

@@ -175,9 +175,20 @@ def activate(
    # MANPATH, PYTHONPATH, etc. All variables that end in PATH (case-sensitive)
    # become PATH variables.
    #
    if add_view and default_view_name in env.views:
        with spack.store.db.read_transaction():
            cmds += env.add_default_view_to_shell(shell)
    try:
        if add_view and default_view_name in env.views:
            with spack.store.db.read_transaction():
                cmds += env.add_default_view_to_shell(shell)
    except (spack.repo.UnknownPackageError,
            spack.repo.UnknownNamespaceError) as e:
        tty.error(e)
        tty.die(
            'Environment view is broken due to a missing package or repo.\n',
            '  To activate without views enabled, activate with:\n',
            '    spack env activate -V {0}\n'.format(env.name),
            '  To remove it and resolve the issue, '
            'force concretize with the command:\n',
            '    spack -e {0} concretize --force'.format(env.name))

    return cmds

@@ -230,9 +241,15 @@ def deactivate(shell='sh'):
        cmds += '  unset SPACK_OLD_PS1; export SPACK_OLD_PS1;\n'
        cmds += 'fi;\n'

    if default_view_name in _active_environment.views:
        with spack.store.db.read_transaction():
            cmds += _active_environment.rm_default_view_from_shell(shell)
    try:
        if default_view_name in _active_environment.views:
            with spack.store.db.read_transaction():
                cmds += _active_environment.rm_default_view_from_shell(shell)
    except (spack.repo.UnknownPackageError,
            spack.repo.UnknownNamespaceError) as e:
        tty.warn(e)
        tty.warn('Could not fully deactivate view due to missing package '
                 'or repo, shell environment may be corrupt.')

    tty.debug("Deactivated environment '%s'" % _active_environment.name)
    _active_environment = None
@@ -446,8 +463,9 @@ def _eval_conditional(string):


class ViewDescriptor(object):
    def __init__(self, root, projections={}, select=[], exclude=[],
    def __init__(self, base_path, root, projections={}, select=[], exclude=[],
                 link=default_view_link):
        self.base = base_path
        self.root = root
        self.projections = projections
        self.select = select
@@ -477,15 +495,19 @@ def to_dict(self):
        return ret

    @staticmethod
    def from_dict(d):
        return ViewDescriptor(d['root'],
    def from_dict(base_path, d):
        return ViewDescriptor(base_path,
                              d['root'],
                              d.get('projections', {}),
                              d.get('select', []),
                              d.get('exclude', []),
                              d.get('link', default_view_link))

    def view(self):
        return YamlFilesystemView(self.root, spack.store.layout,
        root = self.root
        if not os.path.isabs(root):
            root = os.path.normpath(os.path.join(self.base, self.root))
        return YamlFilesystemView(root, spack.store.layout,
                                  ignore_conflicts=True,
                                  projections=self.projections)
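
The normalization added above resolves view roots relative to the environment directory; in isolation (both paths are hypothetical):

    import os

    base = '/home/user/myenv'   # environment directory
    root = '.spack-env/view'    # relative view root from spack.yaml

    if not os.path.isabs(root):
        root = os.path.normpath(os.path.join(base, root))

    print(root)  # /home/user/myenv/.spack-env/view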

@@ -527,20 +549,29 @@ def regenerate(self, all_specs, roots):
        installed_specs_for_view = set(
            s for s in specs_for_view if s in self and s.package.installed)

        view = self.view()
        # To ensure there are no conflicts with packages being installed
        # that cannot be resolved or have repos that have been removed
        # we always regenerate the view from scratch. We must first make
        # sure the root directory exists for the very first time though.
        root = self.root
        if not os.path.isabs(root):
            root = os.path.normpath(os.path.join(self.base, self.root))
        fs.mkdirp(root)
        with fs.replace_directory_transaction(root):
            view = self.view()

        view.clean()
        specs_in_view = set(view.get_all_specs())
        tty.msg("Updating view at {0}".format(self.root))
            view.clean()
            specs_in_view = set(view.get_all_specs())
            tty.msg("Updating view at {0}".format(self.root))

        rm_specs = specs_in_view - installed_specs_for_view
        add_specs = installed_specs_for_view - specs_in_view
            rm_specs = specs_in_view - installed_specs_for_view
            add_specs = installed_specs_for_view - specs_in_view

        # pass all_specs in, as it's expensive to read all the
        # spec.yaml files twice.
        view.remove_specs(*rm_specs, with_dependents=False,
                          all_specs=specs_in_view)
        view.add_specs(*add_specs, with_dependencies=False)
            # pass all_specs in, as it's expensive to read all the
            # spec.yaml files twice.
            view.remove_specs(*rm_specs, with_dependents=False,
                              all_specs=specs_in_view)
            view.add_specs(*add_specs, with_dependencies=False)


class Environment(object):
@@ -586,9 +617,11 @@ def __init__(self, path, init_file=None, with_view=None):
            self.views = {}
        elif with_view is True:
            self.views = {
                default_view_name: ViewDescriptor(self.view_path_default)}
                default_view_name: ViewDescriptor(self.path,
                                                  self.view_path_default)}
        elif isinstance(with_view, six.string_types):
            self.views = {default_view_name: ViewDescriptor(with_view)}
            self.views = {default_view_name: ViewDescriptor(self.path,
                                                            with_view)}
        # If with_view is None, then defer to the view settings determined by
        # the manifest file

@@ -659,11 +692,14 @@ def _read_manifest(self, f, raw_yaml=None):
        # enable_view can be boolean, string, or None
        if enable_view is True or enable_view is None:
            self.views = {
                default_view_name: ViewDescriptor(self.view_path_default)}
                default_view_name: ViewDescriptor(self.path,
                                                  self.view_path_default)}
        elif isinstance(enable_view, six.string_types):
            self.views = {default_view_name: ViewDescriptor(enable_view)}
            self.views = {default_view_name: ViewDescriptor(self.path,
                                                            enable_view)}
        elif enable_view:
            self.views = dict((name, ViewDescriptor.from_dict(values))
            path = self.path
            self.views = dict((name, ViewDescriptor.from_dict(path, values))
                              for name, values in enable_view.items())
        else:
            self.views = {}
|
||||
@@ -776,6 +812,7 @@ def included_config_scopes(self):
         # load config scopes added via 'include:', in reverse so that
         # highest-precedence scopes are last.
         includes = config_dict(self.yaml).get('include', [])
+        missing = []
         for i, config_path in enumerate(reversed(includes)):
             # allow paths to contain spack config/environment variables, etc.
             config_path = substitute_path_variables(config_path)
@@ -790,15 +827,22 @@ def included_config_scopes(self):
                 config_name = 'env:%s:%s' % (
                     self.name, os.path.basename(config_path))
                 scope = spack.config.ConfigScope(config_name, config_path)
-            else:
+            elif os.path.exists(config_path):
                 # files are assumed to be SingleFileScopes
-                base, ext = os.path.splitext(os.path.basename(config_path))
-                config_name = 'env:%s:%s' % (self.name, base)
+                config_name = 'env:%s:%s' % (self.name, config_path)
                 scope = spack.config.SingleFileScope(
                     config_name, config_path, spack.schema.merged.schema)
+            else:
+                missing.append(config_path)
+                continue

             scopes.append(scope)

+        if missing:
+            msg = 'Detected {0} missing include path(s):'.format(len(missing))
+            msg += '\n   {0}'.format('\n   '.join(missing))
+            tty.die('{0}\nPlease correct and try again.'.format(msg))
+
         return scopes
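Note: the include handling now falls into three cases. Distilled to a standalone function (made-up name, same branching as above):

    import os

    def classify_include(config_path):
        # Directories become full ConfigScopes, existing files become
        # SingleFileScopes, and anything else is collected so that every
        # bad path can be reported in a single error at the end.
        if os.path.isdir(config_path):
            return 'directory scope'
        elif os.path.exists(config_path):
            return 'single-file scope'
        return 'missing'

    print(classify_include('/tmp'))  # 'directory scope' on most systems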
@@ -808,24 +852,17 @@ def env_file_config_scope_name(self):
     def env_file_config_scope(self):
         """Get the configuration scope for the environment's manifest file."""
         config_name = self.env_file_config_scope_name()
-        return spack.config.SingleFileScope(config_name,
-                                            self.manifest_path,
-                                            spack.schema.env.schema,
-                                            [spack.schema.env.keys])
+        return spack.config.SingleFileScope(
+            config_name,
+            self.manifest_path,
+            spack.schema.env.schema,
+            [spack.config.first_existing(self.raw_yaml,
+                                         spack.schema.env.keys)])

     def config_scopes(self):
         """A list of all configuration scopes for this environment."""
         return self.included_config_scopes() + [self.env_file_config_scope()]

     def set_config(self, path, value):
         """Set configuration for this environment"""
         yaml = config_dict(self.yaml)
         keys = spack.config.process_config_path(path)
         for key in keys[:-1]:
             yaml = yaml[key]
         yaml[keys[-1]] = value
         self.write()

     def destroy(self):
         """Remove this environment from Spack entirely."""
         shutil.rmtree(self.path)
@@ -910,6 +947,7 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
                 "Not found: {0}".format(query_spec))

         old_specs = set(self.user_specs)
+        new_specs = set()
         for spec in matches:
             if spec in list_to_change:
                 try:
@@ -1097,7 +1135,7 @@ def update_default_view(self, viewpath):
             if name in self.views:
                 self.default_view.root = viewpath
             else:
-                self.views[name] = ViewDescriptor(viewpath)
+                self.views[name] = ViewDescriptor(self.path, viewpath)
         else:
             self.views.pop(name, None)
@@ -1111,6 +1149,24 @@ def regenerate_views(self):
         for view in self.views.values():
             view.regenerate(specs, self.roots())

+    def check_views(self):
+        """Checks if the environments default view can be activated."""
+        try:
+            # This is effectively a no-op, but it touches all packages in the
+            # default view if they are installed.
+            for view_name, view in self.views.items():
+                for _, spec in self.concretized_specs():
+                    if spec in view and spec.package.installed:
+                        tty.debug(
+                            'Spec %s in view %s' % (spec.name, view_name))
+        except (spack.repo.UnknownPackageError,
+                spack.repo.UnknownNamespaceError) as e:
+            tty.warn(e)
+            tty.warn(
+                'Environment %s includes out of date packages or repos. '
+                'Loading the environment view will require reconcretization.'
+                % self.name)
+
     def _env_modifications_for_default_view(self, reverse=False):
         all_mods = spack.util.environment.EnvironmentModifications()
@@ -1418,6 +1474,18 @@ def write(self, regenerate_views=True):
             writing if True.

         """
+        # Intercept environment not using the latest schema format and prevent
+        # them from being modified
+        manifest_exists = os.path.exists(self.manifest_path)
+        if manifest_exists and not is_latest_format(self.manifest_path):
+            msg = ('The environment "{0}" needs to be written to disk, but '
+                   'is currently using a deprecated format. Please update it '
+                   'using:\n\n'
+                   '\tspack env update {0}\n\n'
+                   'Note that previous versions of Spack will not be able to '
+                   'use the updated configuration.')
+            raise RuntimeError(msg.format(self.name))
+
         # ensure path in var/spack/environments
         fs.mkdirp(self.path)
@@ -1445,13 +1513,26 @@ def write(self, regenerate_views=True):
             # write the lock file last
             with fs.write_tmp_and_move(self.lock_path) as f:
                 sjson.dump(self._to_lockfile_dict(), stream=f)
+            self._update_and_write_manifest(raw_yaml_dict, yaml_dict)
         else:
-            if os.path.exists(self.lock_path):
-                os.unlink(self.lock_path)
+            with fs.safe_remove(self.lock_path):
+                self._update_and_write_manifest(raw_yaml_dict, yaml_dict)

+        # TODO: rethink where this needs to happen along with
+        # writing. For some of the commands (like install, which write
+        # concrete specs AND regen) this might as well be a separate
+        # call. But, having it here makes the views consistent witht the
+        # concretized environment for most operations. Which is the
+        # special case?
+        if regenerate_views:
+            self.regenerate_views()
+
+    def _update_and_write_manifest(self, raw_yaml_dict, yaml_dict):
+        """Update YAML manifest for this environment based on changes to
+        spec lists and views and write it.
+        """
         # invalidate _repo cache
         self._repo = None

         # put any changes in the definitions in the YAML
         for name, speclist in self.spec_lists.items():
             if name == user_speclist_name:
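Note: fs.safe_remove is assumed here to take the lock file out of the way up front, restoring it only if the manifest update raises. A minimal sketch of that assumed contract (not Spack's implementation):

    import os
    import shutil
    from contextlib import contextmanager

    @contextmanager
    def safe_remove(path):
        # Move the file aside; put it back on failure, drop it on success.
        backup = path + '.bak' if os.path.exists(path) else None
        if backup:
            shutil.move(path, backup)
        try:
            yield
        except BaseException:
            if backup:
                shutil.move(backup, path)
            raise
        else:
            if backup:
                os.remove(backup)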
@@ -1478,21 +1559,20 @@ def write(self, regenerate_views=True):
                              for ayl in active_yaml_lists)]
                 list_for_new_specs = active_yaml_lists[0].setdefault(name, [])
                 list_for_new_specs[:] = list_for_new_specs + new_specs

         # put the new user specs in the YAML.
         # This can be done directly because there can't be multiple definitions
         # nor when clauses for `specs` list.
         yaml_spec_list = yaml_dict.setdefault(user_speclist_name,
                                               [])
         yaml_spec_list[:] = self.user_specs.yaml_list

         # Construct YAML representation of view
         default_name = default_view_name
         if self.views and len(self.views) == 1 and default_name in self.views:
             path = self.default_view.root
-            if self.default_view == ViewDescriptor(self.view_path_default):
+            if self.default_view == ViewDescriptor(self.path,
+                                                   self.view_path_default):
                 view = True
-            elif self.default_view == ViewDescriptor(path):
+            elif self.default_view == ViewDescriptor(self.path, path):
                 view = path
             else:
                 view = dict((name, view.to_dict())
@@ -1502,9 +1582,7 @@ def write(self, regenerate_views=True):
                         for name, view in self.views.items())
         else:
             view = False

         yaml_dict['view'] = view

         # Remove yaml sections that are shadowing defaults
         # construct garbage path to ensure we don't find a manifest by accident
         with fs.temp_cwd() as env_dir:
@@ -1514,7 +1592,6 @@ def write(self, regenerate_views=True):
             if yaml_dict[key] == config_dict(bare_env.yaml).get(key, None):
                 if key not in raw_yaml_dict:
                     del yaml_dict[key]

         # if all that worked, write out the manifest file at the top level
         # (we used to check whether the yaml had changed and not write it out
         # if it hadn't. We can't do that anymore because it could be the only
@@ -1528,15 +1605,6 @@ def write(self, regenerate_views=True):
         with fs.write_tmp_and_move(self.manifest_path) as f:
             _write_yaml(self.yaml, f)

-        # TODO: rethink where this needs to happen along with
-        # writing. For some of the commands (like install, which write
-        # concrete specs AND regen) this might as well be a separate
-        # call. But, having it here makes the views consistent witht the
-        # concretized environment for most operations. Which is the
-        # special case?
-        if regenerate_views:
-            self.regenerate_views()
-
     def __enter__(self):
         self._previous_active = _active_environment
         activate(self)
@@ -1667,5 +1735,92 @@ def deactivate_config_scope(env):
     spack.config.config.remove_scope(scope.name)


+def manifest_file(env_name_or_dir):
+    """Return the absolute path to a manifest file given the environment
+    name or directory.
+
+    Args:
+        env_name_or_dir (str): either the name of a valid environment
+            or a directory where a manifest file resides
+
+    Raises:
+        AssertionError: if the environment is not found
+    """
+    env_dir = None
+    if is_env_dir(env_name_or_dir):
+        env_dir = os.path.abspath(env_name_or_dir)
+    elif exists(env_name_or_dir):
+        env_dir = os.path.abspath(root(env_name_or_dir))
+
+    assert env_dir, "environment not found [env={0}]".format(env_name_or_dir)
+    return os.path.join(env_dir, manifest_name)
+
+def update_yaml(manifest, backup_file):
+    """Update a manifest file from an old format to the current one.
+
+    Args:
+        manifest (str): path to a manifest file
+        backup_file (str): file where to copy the original manifest
+
+    Returns:
+        True if the manifest was updated, False otherwise.
+
+    Raises:
+        AssertionError: in case anything goes wrong during the update
+    """
+    # Check if the environment needs update
+    with open(manifest) as f:
+        data = syaml.load(f)
+
+    top_level_key = _top_level_key(data)
+    needs_update = spack.schema.env.update(data[top_level_key])
+    if not needs_update:
+        msg = "No update needed [manifest={0}]".format(manifest)
+        tty.debug(msg)
+        return False
+
+    # Copy environment to a backup file and update it
+    msg = ('backup file "{0}" already exists on disk. Check its content '
+           'and remove it before trying to update again.')
+    assert not os.path.exists(backup_file), msg.format(backup_file)
+
+    shutil.copy(manifest, backup_file)
+    with open(manifest, 'w') as f:
+        syaml.dump_config(data, f)
+    return True
+
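Note: update_yaml's safety pattern generalizes to any in-place file migration: refuse to overwrite an existing backup, copy the original aside, then rewrite. A standalone sketch (hypothetical helper; transform stands in for the schema update):

    import os
    import shutil

    def backup_and_rewrite(path, backup, transform):
        # Never clobber a previous backup; the user must inspect and
        # remove it first, exactly as update_yaml asserts above.
        assert not os.path.exists(backup), 'backup file already exists'
        shutil.copy(path, backup)
        with open(path) as f:
            new_text = transform(f.read())
        with open(path, 'w') as f:
            f.write(new_text)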
+def _top_level_key(data):
+    """Return the top level key used in this environment
+
+    Args:
+        data (dict): raw yaml data of the environment
+
+    Returns:
+        Either 'spack' or 'env'
+    """
+    msg = ('cannot find top level attribute "spack" or "env"'
+           'in the environment')
+    assert any(x in data for x in ('spack', 'env')), msg
+    if 'spack' in data:
+        return 'spack'
+    return 'env'
+
+def is_latest_format(manifest):
+    """Return True if the manifest file is at the latest schema format,
+    False otherwise.
+
+    Args:
+        manifest (str): manifest file to be analyzed
+    """
+    with open(manifest) as f:
+        data = syaml.load(f)
+    top_level_key = _top_level_key(data)
+    changed = spack.schema.env.update(data[top_level_key])
+    return not changed


 class SpackEnvironmentError(spack.error.SpackError):
     """Superclass for all errors to do with Spack environments."""
@@ -289,30 +289,45 @@ def candidate_urls(self):
     @_needs_stage
     def fetch(self):
         if self.archive_file:
-            tty.msg("Already downloaded %s" % self.archive_file)
+            tty.debug('Already downloaded {0}'.format(self.archive_file))
             return

         url = None
+        errors = []
         for url in self.candidate_urls:
             if not self._existing_url(url):
                 continue

             try:
                 partial_file, save_file = self._fetch_from_url(url)
                 if save_file:
                     os.rename(partial_file, save_file)
                 break
             except FetchError as e:
-                tty.msg(str(e))
-                pass
+                errors.append(str(e))
+
+        for msg in errors:
+            tty.debug(msg)

         if not self.archive_file:
             raise FailedDownloadError(url)

     def _existing_url(self, url):
         tty.debug('Checking existence of {0}'.format(url))
         curl = self.curl
         # Telling curl to fetch the first byte (-r 0-0) is supposed to be
         # portable.
         curl_args = ['--stderr', '-', '-s', '-f', '-r', '0-0', url]
         _ = curl(*curl_args, fail_on_error=False, output=os.devnull)
         return curl.returncode == 0

     def _fetch_from_url(self, url):
         save_file = None
         partial_file = None
         if self.stage.save_filename:
             save_file = self.stage.save_filename
             partial_file = self.stage.save_filename + '.part'
-        tty.msg("Fetching %s" % url)
+        tty.debug('Fetching {0}'.format(url))
         if partial_file:
             save_args = ['-C',
                          '-',  # continue partial downloads
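Note: the -r 0-0 probe in _existing_url asks the server for a single byte, so existence can be checked without downloading anything. The same probe as a plain subprocess call (assumes a curl binary on PATH; not Spack's Executable wrapper):

    import os
    import subprocess

    def url_exists(url):
        # curl -f makes HTTP errors fatal, so a zero exit status means
        # the one-byte range request succeeded.
        result = subprocess.run(
            ['curl', '-s', '-f', '-r', '0-0', '-o', os.devnull, url],
            stderr=subprocess.DEVNULL)
        return result.returncode == 0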
@@ -335,7 +350,7 @@ def _fetch_from_url(self, url):
         if sys.stdout.isatty() and tty.msg_enabled():
             curl_args.append('-#')  # status bar when using a tty
         else:
-            curl_args.append('-sS')  # just errors when not.
+            curl_args.append('-sS')  # show errors if fail

         connect_timeout = spack.config.get('config:connect_timeout', 10)
@@ -412,8 +427,8 @@ def cachable(self):
     @_needs_stage
     def expand(self):
         if not self.expand_archive:
-            tty.msg("Staging unexpanded archive %s in %s" % (
-                self.archive_file, self.stage.source_path))
+            tty.debug('Staging unexpanded archive {0} in {1}'
+                      .format(self.archive_file, self.stage.source_path))
             if not self.stage.expanded:
                 mkdirp(self.stage.source_path)
             dest = os.path.join(self.stage.source_path,
@@ -421,7 +436,7 @@ def expand(self):
             shutil.move(self.archive_file, dest)
             return

-        tty.msg("Staging archive: %s" % self.archive_file)
+        tty.debug('Staging archive: {0}'.format(self.archive_file))

         if not self.archive_file:
             raise NoArchiveFileError(
@@ -564,7 +579,7 @@ def fetch(self):
             raise

         # Notify the user how we fetched.
-        tty.msg('Using cached archive: %s' % path)
+        tty.msg('Using cached archive: {0}'.format(path))


 class VCSFetchStrategy(FetchStrategy):
@@ -594,7 +609,8 @@ def __init__(self, **kwargs):

     @_needs_stage
     def check(self):
-        tty.msg("No checksum needed when fetching with %s" % self.url_attr)
+        tty.debug('No checksum needed when fetching with {0}'
+                  .format(self.url_attr))

     @_needs_stage
     def expand(self):
@@ -672,7 +688,7 @@ def go(self):

     @_needs_stage
     def fetch(self):
-        tty.msg("Getting go resource:", self.url)
+        tty.debug('Getting go resource: {0}'.format(self.url))

         with working_dir(self.stage.path):
             try:
@@ -788,10 +804,10 @@ def _repo_info(self):
     @_needs_stage
     def fetch(self):
         if self.stage.expanded:
-            tty.msg("Already fetched {0}".format(self.stage.source_path))
+            tty.debug('Already fetched {0}'.format(self.stage.source_path))
             return

-        tty.msg("Cloning git repository: {0}".format(self._repo_info()))
+        tty.debug('Cloning git repository: {0}'.format(self._repo_info()))

         git = self.git
         if self.commit:
@@ -959,10 +975,10 @@ def mirror_id(self):
     @_needs_stage
     def fetch(self):
         if self.stage.expanded:
-            tty.msg("Already fetched %s" % self.stage.source_path)
+            tty.debug('Already fetched {0}'.format(self.stage.source_path))
             return

-        tty.msg("Checking out subversion repository: %s" % self.url)
+        tty.debug('Checking out subversion repository: {0}'.format(self.url))

         args = ['checkout', '--force', '--quiet']
         if self.revision:
@@ -1068,13 +1084,14 @@ def mirror_id(self):
     @_needs_stage
     def fetch(self):
         if self.stage.expanded:
-            tty.msg("Already fetched %s" % self.stage.source_path)
+            tty.debug('Already fetched {0}'.format(self.stage.source_path))
             return

         args = []
         if self.revision:
             args.append('at revision %s' % self.revision)
-        tty.msg("Cloning mercurial repository:", self.url, *args)
+        tty.debug('Cloning mercurial repository: {0} {1}'
+                  .format(self.url, args))

         args = ['clone']
@@ -1130,7 +1147,7 @@ def __init__(self, *args, **kwargs):
     @_needs_stage
     def fetch(self):
         if self.archive_file:
-            tty.msg("Already downloaded %s" % self.archive_file)
+            tty.debug('Already downloaded {0}'.format(self.archive_file))
             return

         parsed_url = url_util.parse(self.url)
@@ -1138,7 +1155,7 @@ def fetch(self):
             raise FetchError(
                 'S3FetchStrategy can only fetch from s3:// urls.')

-        tty.msg("Fetching %s" % self.url)
+        tty.debug('Fetching {0}'.format(self.url))

         basename = os.path.basename(parsed_url.path)

lib/spack/spack/install_test.py  (new file, 264 lines)
@@ -0,0 +1,264 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import base64
import hashlib
import os
import re
import shutil
import sys

import llnl.util.filesystem as fs
import llnl.util.tty as tty

from spack.spec import Spec

import spack.config
import spack.error
import spack.repo
import spack.util.path
import spack.util.prefix
import spack.util.spack_json as sjson


test_suite_filename = 'test_suite.lock'
results_filename = 'results.txt'

def get_escaped_text_output(filename):
    """Retrieve and escape the expected text output from the file

    Args:
        filename (str): path to the file

    Returns:
        (list of str): escaped text lines read from the file
    """
    with open(filename, 'r') as f:
        # Ensure special characters are escaped as needed
        expected = f.read()

    # Split the lines to make it easier to debug failures when there is
    # a lot of output
    return [re.escape(ln) for ln in expected.split('\n')]


def get_test_stage_dir():
    return spack.util.path.canonicalize_path(
        spack.config.get('config:test_stage', '~/.spack/test'))


def get_all_test_suites():
    stage_root = get_test_stage_dir()

    def valid_stage(d):
        dirpath = os.path.join(stage_root, d)
        return (os.path.isdir(dirpath) and
                test_suite_filename in os.listdir(dirpath))

    candidates = [
        os.path.join(stage_root, d, test_suite_filename)
        for d in os.listdir(stage_root)
        if valid_stage(d)
    ]

    test_suites = [TestSuite.from_file(c) for c in candidates]
    return test_suites


def get_test_suite(name):
    assert name, "Cannot search for empty test name or 'None'"
    test_suites = get_all_test_suites()
    names = [ts for ts in test_suites
             if ts.name == name]
    assert len(names) < 2, "alias shadows test suite hash"

    if not names:
        return None
    return names[0]

class TestSuite(object):
    def __init__(self, specs, alias=None):
        # copy so that different test suites have different package objects
        # even if they contain the same spec
        self.specs = [spec.copy() for spec in specs]
        self.current_test_spec = None  # spec currently tested, can be virtual
        self.current_base_spec = None  # spec currently running do_test

        self.alias = alias
        self._hash = None

    @property
    def name(self):
        return self.alias if self.alias else self.content_hash

    @property
    def content_hash(self):
        if not self._hash:
            json_text = sjson.dump(self.to_dict())
            sha = hashlib.sha1(json_text.encode('utf-8'))
            b32_hash = base64.b32encode(sha.digest()).lower()
            if sys.version_info[0] >= 3:
                b32_hash = b32_hash.decode('utf-8')
            self._hash = b32_hash
        return self._hash

    def __call__(self, *args, **kwargs):
        self.write_reproducibility_data()

        remove_directory = kwargs.get('remove_directory', True)
        dirty = kwargs.get('dirty', False)
        fail_first = kwargs.get('fail_first', False)

        for spec in self.specs:
            try:
                msg = "A package object cannot run in two test suites at once"
                assert not spec.package.test_suite, msg

                # Set up the test suite to know which test is running
                spec.package.test_suite = self
                self.current_base_spec = spec
                self.current_test_spec = spec

                # setup per-test directory in the stage dir
                test_dir = self.test_dir_for_spec(spec)
                if os.path.exists(test_dir):
                    shutil.rmtree(test_dir)
                fs.mkdirp(test_dir)

                # run the package tests
                spec.package.do_test(
                    dirty=dirty
                )

                # Clean up on success and log passed test
                if remove_directory:
                    shutil.rmtree(test_dir)
                self.write_test_result(spec, 'PASSED')
            except BaseException as exc:
                if isinstance(exc, SyntaxError):
                    # Create the test log file and report the error.
                    self.ensure_stage()
                    msg = 'Testing package {0}\n{1}'\
                        .format(self.test_pkg_id(spec), str(exc))
                    _add_msg_to_file(self.log_file_for_spec(spec), msg)

                self.write_test_result(spec, 'FAILED')
                if fail_first:
                    break
            finally:
                spec.package.test_suite = None
                self.current_test_spec = None
                self.current_base_spec = None

    def ensure_stage(self):
        if not os.path.exists(self.stage):
            fs.mkdirp(self.stage)

    @property
    def stage(self):
        return spack.util.prefix.Prefix(
            os.path.join(get_test_stage_dir(), self.content_hash))

    @property
    def results_file(self):
        return self.stage.join(results_filename)

    @classmethod
    def test_pkg_id(cls, spec):
        """Build the standard install test package identifier

        Args:
            spec (Spec): instance of the spec under test

        Returns:
            (str): the install test package identifier
        """
        return spec.format('{name}-{version}-{hash:7}')

    @classmethod
    def test_log_name(cls, spec):
        return '%s-test-out.txt' % cls.test_pkg_id(spec)

    def log_file_for_spec(self, spec):
        return self.stage.join(self.test_log_name(spec))

    def test_dir_for_spec(self, spec):
        return self.stage.join(self.test_pkg_id(spec))

    @property
    def current_test_data_dir(self):
        assert self.current_test_spec and self.current_base_spec
        test_spec = self.current_test_spec
        base_spec = self.current_base_spec
        return self.test_dir_for_spec(base_spec).data.join(test_spec.name)

    def add_failure(self, exc, msg):
        current_hash = self.current_base_spec.dag_hash()
        current_failures = self.failures.get(current_hash, [])
        current_failures.append((exc, msg))
        self.failures[current_hash] = current_failures

    def write_test_result(self, spec, result):
        msg = "{0} {1}".format(self.test_pkg_id(spec), result)
        _add_msg_to_file(self.results_file, msg)

    def write_reproducibility_data(self):
        for spec in self.specs:
            repo_cache_path = self.stage.repo.join(spec.name)
            spack.repo.path.dump_provenance(spec, repo_cache_path)
            for vspec in spec.package.virtuals_provided:
                repo_cache_path = self.stage.repo.join(vspec.name)
                if not os.path.exists(repo_cache_path):
                    try:
                        spack.repo.path.dump_provenance(vspec, repo_cache_path)
                    except spack.repo.UnknownPackageError:
                        pass  # not all virtuals have package files

        with open(self.stage.join(test_suite_filename), 'w') as f:
            sjson.dump(self.to_dict(), stream=f)

    def to_dict(self):
        specs = [s.to_dict() for s in self.specs]
        d = {'specs': specs}
        if self.alias:
            d['alias'] = self.alias
        return d

    @staticmethod
    def from_dict(d):
        specs = [Spec.from_dict(spec_dict) for spec_dict in d['specs']]
        alias = d.get('alias', None)
        return TestSuite(specs, alias)

    @staticmethod
    def from_file(filename):
        try:
            with open(filename, 'r') as f:
                data = sjson.load(f)
                return TestSuite.from_dict(data)
        except Exception as e:
            tty.debug(e)
            raise sjson.SpackJSONError("error parsing JSON TestSuite:", str(e))


def _add_msg_to_file(filename, msg):
    """Add the message to the specified file

    Args:
        filename (str): path to the file
        msg (str): message to be appended to the file
    """
    with open(filename, 'a+') as f:
        f.write('{0}\n'.format(msg))


class TestFailure(spack.error.SpackError):
    """Raised when package tests have failed for an installation."""
    def __init__(self, failures):
        # Failures are all exceptions
        msg = "%d tests failed.\n" % len(failures)
        for failure, message in failures:
            msg += '\n\n%s\n' % str(failure)
            msg += '\n%s\n' % message

        super(TestFailure, self).__init__(msg)
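Note: the lock file and stage directory of a TestSuite are keyed by a content hash of its spec list. The scheme in isolation:

    import base64
    import hashlib

    def content_hash(json_text):
        # Same recipe as TestSuite.content_hash: sha1 of the JSON dump,
        # base32-encoded and lowercased so it is filesystem-safe.
        sha = hashlib.sha1(json_text.encode('utf-8'))
        return base64.b32encode(sha.digest()).lower().decode('utf-8')

    print(content_hash('{"specs": []}'))  # 32 lowercase characters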
@@ -215,18 +215,18 @@ def _hms(seconds):

 def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
     """
-    Install the package from binary cache
+    Extract the package from binary cache

     Args:
         pkg (PackageBase): the package to install from the binary cache
-        cache_only (bool): only install from binary cache
+        cache_only (bool): only extract from binary cache
         explicit (bool): ``True`` if installing the package was explicitly
             requested by the user, otherwise, ``False``
         unsigned (bool): ``True`` if binary package signatures to be checked,
             otherwise, ``False``

     Return:
-        (bool) ``True`` if the package was installed from binary cache,
+        (bool) ``True`` if the package was extract from binary cache,
         ``False`` otherwise
     """
     installed_from_cache = _try_install_from_binary_cache(pkg, explicit,
@@ -237,10 +237,10 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
         if cache_only:
             tty.die('{0} when cache-only specified'.format(pre))

-        tty.debug('{0}: installing from source'.format(pre))
+        tty.msg('{0}: installing from source'.format(pre))
         return False

-    tty.debug('Successfully installed {0} from binary cache'.format(pkg_id))
+    tty.debug('Successfully extracted {0} from binary cache'.format(pkg_id))
     _print_installed_pkg(pkg.spec.prefix)
     spack.hooks.post_install(pkg.spec)
     return True
@@ -272,20 +272,20 @@ def _process_external_package(pkg, explicit):
     pre = '{s.name}@{s.version} :'.format(s=pkg.spec)
     spec = pkg.spec

-    if spec.external_module:
+    if spec.external_modules:
         tty.msg('{0} has external module in {1}'
-                .format(pre, spec.external_module))
-        tty.msg('{0} is actually installed in {1}'
-                .format(pre, spec.external_path))
+                .format(pre, spec.external_modules))
+        tty.debug('{0} is actually installed in {1}'
+                  .format(pre, spec.external_path))
     else:
-        tty.msg("{0} externally installed in {1}"
+        tty.msg('{0} externally installed in {1}'
                 .format(pre, spec.external_path))

     try:
         # Check if the package was already registered in the DB.
         # If this is the case, then just exit.
         rec = spack.store.db.get_record(spec)
-        tty.msg('{0} already registered in DB'.format(pre))
+        tty.debug('{0} already registered in DB'.format(pre))

         # Update the value of rec.explicit if it is necessary
         _update_explicit_entry_in_db(pkg, rec, explicit)
@@ -294,11 +294,11 @@ def _process_external_package(pkg, explicit):
         # If not, register it and generate the module file.
         # For external packages we just need to run
         # post-install hooks to generate module files.
-        tty.msg('{0} generating module file'.format(pre))
+        tty.debug('{0} generating module file'.format(pre))
         spack.hooks.post_install(spec)

         # Add to the DB
-        tty.msg('{0} registering into DB'.format(pre))
+        tty.debug('{0} registering into DB'.format(pre))
         spack.store.db.add(spec, None, explicit=explicit)

@@ -314,7 +314,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned):
             otherwise, ``False``

     Return:
-        (bool) ``True`` if the package was installed from binary cache,
+        (bool) ``True`` if the package was extracted from binary cache,
         else ``False``
     """
     tarball = binary_distribution.download_tarball(binary_spec)
@@ -325,7 +325,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned):
         return False

     pkg_id = package_id(pkg)
-    tty.msg('Installing {0} from binary cache'.format(pkg_id))
+    tty.msg('Extracting {0} from binary cache'.format(pkg_id))
     binary_distribution.extract_tarball(binary_spec, tarball, allow_root=False,
                                         unsigned=unsigned, force=False)
     pkg.installed_from_binary_cache = True
@@ -335,10 +335,10 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned):

 def _try_install_from_binary_cache(pkg, explicit, unsigned=False):
     """
-    Try to install the package from binary cache.
+    Try to extract the package from binary cache.

     Args:
-        pkg (PackageBase): the package to be installed from binary cache
+        pkg (PackageBase): the package to be extracted from binary cache
         explicit (bool): the package was explicitly requested by the user
         unsigned (bool): ``True`` if binary package signatures to be checked,
             otherwise, ``False``
@@ -369,7 +369,7 @@ def _update_explicit_entry_in_db(pkg, rec, explicit):
     with spack.store.db.write_transaction():
         rec = spack.store.db.get_record(pkg.spec)
         message = '{s.name}@{s.version} : marking the package explicit'
-        tty.msg(message.format(s=pkg.spec))
+        tty.debug(message.format(s=pkg.spec))
         rec.explicit = True

@@ -452,7 +452,8 @@ def install_msg(name, pid):
     Return:
         (str) Colorized installing message
     """
-    return '{0}: '.format(pid) + colorize('@*{Installing} @*g{%s}' % name)
+    pre = '{0}: '.format(pid) if tty.show_pid() else ''
+    return pre + colorize('@*{Installing} @*g{%s}' % name)

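Note: install_msg now gates the pid prefix on a tty setting. Emulated standalone (SHOW_PID stands in for tty.show_pid()):

    SHOW_PID = False

    def install_msg(name, pid):
        # Prefix the message with the process id only when pid reporting
        # is enabled, exactly the shape of the change above.
        pre = '{0}: '.format(pid) if SHOW_PID else ''
        return pre + 'Installing {0}'.format(name)

    print(install_msg('zlib', 4242))  # 'Installing zlib'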
@@ -465,7 +466,6 @@ def log(pkg):
     packages_dir = spack.store.layout.build_packages_path(pkg.spec)

     # Remove first if we're overwriting another build
-    # (can happen with spack setup)
     try:
         # log and env install paths are inside this
         shutil.rmtree(packages_dir)
@@ -1057,11 +1057,15 @@ def _install_task(self, task, **kwargs):
         if use_cache and \
                 _install_from_cache(pkg, cache_only, explicit, unsigned):
             self._update_installed(task)
+            if task.compiler:
+                spack.compilers.add_compilers_to_config(
+                    spack.compilers.find_compilers([pkg.spec.prefix]))
             return

         pkg.run_tests = (tests is True or tests and pkg.name in tests)

-        pre = '{0}: {1}:'.format(self.pid, pkg.name)
+        pid = '{0}: '.format(self.pid) if tty.show_pid() else ''
+        pre = '{0}{1}:'.format(pid, pkg.name)

         def build_process():
             """
@@ -1080,8 +1084,8 @@ def build_process():
             pkg.do_stage()

             pkg_id = package_id(pkg)
-            tty.msg('{0} Building {1} [{2}]'
-                    .format(pre, pkg_id, pkg.build_system_class))
+            tty.debug('{0} Building {1} [{2}]'
+                      .format(pre, pkg_id, pkg.build_system_class))

             # get verbosity from do_install() parameter or saved value
             echo = verbose
@@ -1102,12 +1106,12 @@ def build_process():
             if install_source and os.path.isdir(source_path):
                 src_target = os.path.join(pkg.spec.prefix, 'share',
                                           pkg.name, 'src')
-                tty.msg('{0} Copying source to {1}'
-                        .format(pre, src_target))
-                fs.install_tree(pkg.stage.source_path, src_target)
+                tty.debug('{0} Copying source to {1}'
+                          .format(pre, src_target))
+                fs.install_tree(source_path, src_target)

             # Do the real install in the source directory.
-            with fs.working_dir(pkg.stage.source_path):
+            with fs.working_dir(source_path):
                 # Save the build environment in a file before building.
                 dump_environment(pkg.env_path)
@@ -1125,20 +1129,20 @@ def build_process():
                     pass

                 # cache debug settings
-                debug_enabled = tty.is_debug()
+                debug_level = tty.debug_level()

                 # Spawn a daemon that reads from a pipe and redirects
                 # everything to log_path
-                with log_output(pkg.log_path, echo, True) as logger:
+                with log_output(pkg.log_path, echo=echo, debug=True) as logger:
                     for phase_name, phase_attr in zip(
                             pkg.phases, pkg._InstallPhase_phases):

                         with logger.force_echo():
-                            inner_debug = tty.is_debug()
-                            tty.set_debug(debug_enabled)
+                            inner_debug_level = tty.debug_level()
+                            tty.set_debug(debug_level)
                             tty.msg("{0} Executing phase: '{1}'"
                                     .format(pre, phase_name))
-                            tty.set_debug(inner_debug)
+                            tty.set_debug(inner_debug_level)

                             # Redirect stdout and stderr to daemon pipe
                             phase = getattr(pkg, phase_attr)
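Note: the force_echo block saves and restores the tty debug level around a single message. The pattern, reduced to standalone form (Tty is a stand-in for llnl.util.tty's module state):

    class Tty(object):
        _level = 0

        @classmethod
        def debug_level(cls):
            return cls._level

        @classmethod
        def set_debug(cls, level):
            cls._level = level

    # Save the child's current level, force the parent's cached level for
    # one message, then restore, as build_process does around the
    # "Executing phase" line.
    saved = Tty.debug_level()
    Tty.set_debug(2)
    print('Executing phase: build')
    Tty.set_debug(saved)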
@@ -1154,11 +1158,11 @@ def build_process():
             pkg._total_time = time.time() - start_time
             build_time = pkg._total_time - pkg._fetch_time

-            tty.msg('{0} Successfully installed {1}'
-                    .format(pre, pkg_id),
-                    'Fetch: {0}. Build: {1}. Total: {2}.'
-                    .format(_hms(pkg._fetch_time), _hms(build_time),
-                            _hms(pkg._total_time)))
+            tty.debug('{0} Successfully installed {1}'
+                      .format(pre, pkg_id),
+                      'Fetch: {0}. Build: {1}. Total: {2}.'
+                      .format(_hms(pkg._fetch_time), _hms(build_time),
+                              _hms(pkg._total_time)))
             _print_installed_pkg(pkg.prefix)

             # preserve verbosity across runs
@@ -1189,7 +1193,8 @@ def build_process():
         except spack.build_environment.StopPhase as e:
             # A StopPhase exception means that do_install was asked to
             # stop early from clients, and is not an error at this point
-            tty.debug('{0} {1}'.format(self.pid, str(e)))
+            pre = '{0}'.format(self.pid) if tty.show_pid() else ''
+            tty.debug('{0}{1}'.format(pid, str(e)))
             tty.debug('Package stage directory : {0}'
                       .format(pkg.stage.source_path))
@@ -1562,9 +1567,14 @@ def install(self, **kwargs):
             except (Exception, SystemExit) as exc:
                 # Best effort installs suppress the exception and mark the
                 # package as a failure UNLESS this is the explicit package.
-                err = 'Failed to install {0} due to {1}: {2}'
-                tty.error(err.format(pkg.name, exc.__class__.__name__,
-                                     str(exc)))
+                if (not isinstance(exc, spack.error.SpackError) or
+                        not exc.printed):
+                    # SpackErrors can be printed by the build process or at
+                    # lower levels -- skip printing if already printed.
+                    # TODO: sort out this and SpackEror.print_context()
+                    err = 'Failed to install {0} due to {1}: {2}'
+                    tty.error(
+                        err.format(pkg.name, exc.__class__.__name__, str(exc)))

                 self._update_failed(task, True, exc)

@@ -128,8 +128,8 @@ def get_version():
     git = exe.which("git")
     if git:
         with fs.working_dir(spack.paths.prefix):
-            desc = git(
-                "describe", "--tags", output=str, fail_on_error=False)
+            desc = git("describe", "--tags", "--match", "v*",
+                       output=str, error=os.devnull, fail_on_error=False)

         if git.returncode == 0:
             match = re.match(r"v([^-]+)-([^-]+)-g([a-f\d]+)", desc)
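Note: the regex in get_version parses `git describe --tags` output of the form v<version>-<commits>-g<sha>. A quick standalone check:

    import re

    desc = 'v0.15.1-234-gdeadbee'  # example describe output
    match = re.match(r'v([^-]+)-([^-]+)-g([a-f\d]+)', desc)
    print(match.groups())  # ('0.15.1', '234', 'deadbee')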
@@ -281,7 +281,7 @@ def add_subcommand_group(title, commands):
   spack help --all       list all commands and options
   spack help <command>   help on a specific command
   spack help --spec      help on the package specification syntax
-  spack docs             open http://spack.rtfd.io/ in a browser
+  spack docs             open https://spack.rtfd.io/ in a browser
 """.format(help=section_descriptions['help']))

 # determine help from format above
@@ -362,8 +362,9 @@ def make_argument_parser(**kwargs):
         '-C', '--config-scope', dest='config_scopes', action='append',
         metavar='DIR', help="add a custom configuration scope")
     parser.add_argument(
-        '-d', '--debug', action='store_true',
-        help="write out debug logs during compile")
+        '-d', '--debug', action='count', default=0,
+        help="write out debug messages "
+             "(more d's for more verbosity: -d, -dd, -ddd, etc.)")
     parser.add_argument(
         '--timestamp', action='store_true',
         help="Add a timestamp to tty output")
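Note: the -d flag now uses argparse's counting action, so the debug level is simply the number of repetitions. Standalone illustration:

    import argparse

    parser = argparse.ArgumentParser()
    # action='count' turns each occurrence into +1; default=0 keeps the
    # attribute an int even when the flag is absent.
    parser.add_argument('-d', '--debug', action='count', default=0)
    print(parser.parse_args([]).debug)        # 0
    print(parser.parse_args(['-ddd']).debug)  # 3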
@@ -438,7 +439,7 @@ def setup_main_options(args):
     tty.set_debug(args.debug)
     tty.set_stacktrace(args.stacktrace)

-    # debug must be set first so that it can even affect behvaior of
+    # debug must be set first so that it can even affect behavior of
     # errors raised by spack.config.
     if args.debug:
         spack.error.debug = True
@@ -701,15 +702,15 @@ def main(argv=None):
         if stored_var_name in os.environ:
             os.environ[var] = os.environ[stored_var_name]

+    # make spack.config aware of any command line configuration scopes
+    if args.config_scopes:
+        spack.config.command_line_scopes = args.config_scopes
+
     # activate an environment if one was specified on the command line
     if not args.no_env:
         env = ev.find_environment(args)
         if env:
-            ev.activate(env, args.use_env_repo)
-
-    # make spack.config aware of any command line configuration scopes
-    if args.config_scopes:
-        spack.config.command_line_scopes = args.config_scopes
+            ev.activate(env, args.use_env_repo, add_view=False)

     if args.print_shell_vars:
         print_setup_info(*args.print_shell_vars.split(','))