Compare commits
843 Commits
e4s-22.08
...
cws/config
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
93b14e6c19 | ||
|
|
b44c83429c | ||
|
|
8e8d6e5adb | ||
|
|
b21e54dffa | ||
|
|
fe2656f182 | ||
|
|
0b014ff9cd | ||
|
|
dd003f66a8 | ||
|
|
9d11b96e4b | ||
|
|
47bfc60845 | ||
|
|
9b87b4c8cd | ||
|
|
c0361168a5 | ||
|
|
da6aeaad44 | ||
|
|
fb090a69f4 | ||
|
|
2e55812417 | ||
|
|
e7b14dd491 | ||
|
|
839cf48352 | ||
|
|
39105a3a6f | ||
|
|
9081871966 | ||
|
|
ad430a7504 | ||
|
|
db342d8727 | ||
|
|
32761cdb7b | ||
|
|
29df7e9be3 | ||
|
|
ea80113d0f | ||
|
|
4fe53061a8 | ||
|
|
bfe49222d5 | ||
|
|
42a27f3075 | ||
|
|
e882583b01 | ||
|
|
1be6506e29 | ||
|
|
25e35c936b | ||
|
|
2c802c12a5 | ||
|
|
f3523d8655 | ||
|
|
84fbccd682 | ||
|
|
8dc2d37447 | ||
|
|
9933a9046a | ||
|
|
f3ebe237e5 | ||
|
|
7993d10e54 | ||
|
|
574ab3e40a | ||
|
|
1338dbca56 | ||
|
|
02fb32bc1e | ||
|
|
25e4d48227 | ||
|
|
7547f1c414 | ||
|
|
0c505e459b | ||
|
|
23fe981c41 | ||
|
|
f7f11fc881 | ||
|
|
496f4193a6 | ||
|
|
10491e98a8 | ||
|
|
898c0b45fb | ||
|
|
b2d7782b00 | ||
|
|
2f6a56a43b | ||
|
|
31cda96181 | ||
|
|
19226ecc49 | ||
|
|
2a166a5cc4 | ||
|
|
a661193c64 | ||
|
|
9546eadd98 | ||
|
|
89b3e6c6d0 | ||
|
|
6983d520fa | ||
|
|
c44934a44d | ||
|
|
67bc90acb7 | ||
|
|
0de1f98920 | ||
|
|
c13381fab3 | ||
|
|
d5ebb55338 | ||
|
|
7da303334e | ||
|
|
43dd34b651 | ||
|
|
00ea25061f | ||
|
|
75f71d3f81 | ||
|
|
f74742b834 | ||
|
|
599480ae9a | ||
|
|
feb1f3aadb | ||
|
|
8ce1574e0c | ||
|
|
7f24ab9b0a | ||
|
|
5167eed558 | ||
|
|
3014caa586 | ||
|
|
6ac1f445ec | ||
|
|
be93b27ffc | ||
|
|
9c6c296474 | ||
|
|
e20d45f3bc | ||
|
|
0c4ad440c6 | ||
|
|
93be19d0e3 | ||
|
|
4c151e0387 | ||
|
|
0af5838581 | ||
|
|
828fddacf1 | ||
|
|
268a43762c | ||
|
|
bd263b71da | ||
|
|
f76a3a1a73 | ||
|
|
5eeb81c253 | ||
|
|
4ddf011c56 | ||
|
|
f5704fff69 | ||
|
|
a8b1314d18 | ||
|
|
601c727491 | ||
|
|
40c400441a | ||
|
|
a2dee76310 | ||
|
|
f24c135383 | ||
|
|
5009e3d94a | ||
|
|
8dbdfbd1eb | ||
|
|
042fcc3575 | ||
|
|
48da17d18e | ||
|
|
4765309b97 | ||
|
|
d52eef5b16 | ||
|
|
a227fec4b9 | ||
|
|
949151aff3 | ||
|
|
008b13f676 | ||
|
|
acd4787a1a | ||
|
|
4ee22e7cf7 | ||
|
|
8537220b0e | ||
|
|
c85faaa216 | ||
|
|
dc39edb790 | ||
|
|
f9620950cd | ||
|
|
f10997a0df | ||
|
|
2df25d8b37 | ||
|
|
e31a4b6dc6 | ||
|
|
827e576c3d | ||
|
|
549f6361ce | ||
|
|
c3cc462a69 | ||
|
|
bffc4ab826 | ||
|
|
fffa9258c5 | ||
|
|
a34b36703a | ||
|
|
7efbd7d8eb | ||
|
|
bd5705b2a8 | ||
|
|
f4cc48286d | ||
|
|
caf8b57fd4 | ||
|
|
9342344f78 | ||
|
|
552908595e | ||
|
|
9cd47454f3 | ||
|
|
cbe2178e3f | ||
|
|
7d4fa0ea00 | ||
|
|
7f49cc2d17 | ||
|
|
a43ad2d876 | ||
|
|
401412f999 | ||
|
|
019463be39 | ||
|
|
cd74e091d0 | ||
|
|
5844c24ca8 | ||
|
|
d1fb82a2c4 | ||
|
|
926dca9e5f | ||
|
|
4b866e8ffc | ||
|
|
5d0f0914b8 | ||
|
|
de8c827983 | ||
|
|
b594c0aee0 | ||
|
|
8e3c088a7a | ||
|
|
a0489d8480 | ||
|
|
a587bff119 | ||
|
|
c885b591e2 | ||
|
|
771aee30ea | ||
|
|
87536ab107 | ||
|
|
acf6acc93c | ||
|
|
cecec254b0 | ||
|
|
c7472c849f | ||
|
|
be293ceb7a | ||
|
|
4a9790e8cd | ||
|
|
cd6ef2c3cb | ||
|
|
042050be11 | ||
|
|
46b7d3995c | ||
|
|
c393a57a48 | ||
|
|
9d89dba292 | ||
|
|
a05a34361a | ||
|
|
dc141f5ad6 | ||
|
|
634941a1c3 | ||
|
|
22ea4aa210 | ||
|
|
83916961da | ||
|
|
dcf157d3a9 | ||
|
|
831d7979ca | ||
|
|
14f6de9bf2 | ||
|
|
c777380569 | ||
|
|
0835b69771 | ||
|
|
d886700de5 | ||
|
|
15dddee8f6 | ||
|
|
75cb7cefc1 | ||
|
|
c61dad1c25 | ||
|
|
ee2ece3c91 | ||
|
|
8829fb7c03 | ||
|
|
ef4c7474e5 | ||
|
|
5f642ff2d6 | ||
|
|
41f992a2f8 | ||
|
|
c453d8718b | ||
|
|
5b3e8a46b3 | ||
|
|
8d9a035d12 | ||
|
|
cbc867a24c | ||
|
|
86aaede202 | ||
|
|
8da82ebf69 | ||
|
|
fc23b48804 | ||
|
|
f915f9db32 | ||
|
|
27cf8dddec | ||
|
|
7cb745b03a | ||
|
|
bfbd411091 | ||
|
|
2a43571a68 | ||
|
|
46239ea525 | ||
|
|
01ede3c595 | ||
|
|
4a6aff8bd1 | ||
|
|
5d8e97af2a | ||
|
|
48f5f8eb17 | ||
|
|
01a54dd616 | ||
|
|
e5414ed9cc | ||
|
|
20453622a0 | ||
|
|
4a3e3807a3 | ||
|
|
c60dffaea7 | ||
|
|
17898a61dd | ||
|
|
2d28274387 | ||
|
|
918ed5f328 | ||
|
|
6f79dc654c | ||
|
|
6c2df00443 | ||
|
|
3146f9309c | ||
|
|
c1440aaa17 | ||
|
|
5ca32d82e4 | ||
|
|
dfbeaff8ae | ||
|
|
8309ae08d1 | ||
|
|
4bc8f66388 | ||
|
|
8a5790514d | ||
|
|
01e7b89b53 | ||
|
|
041c1486f8 | ||
|
|
cbc224852b | ||
|
|
5560bbf97e | ||
|
|
b3cfcebf94 | ||
|
|
f3027fb561 | ||
|
|
af4134dd48 | ||
|
|
3270df735f | ||
|
|
a2d7776c95 | ||
|
|
a4651a2a02 | ||
|
|
910cf7fe7b | ||
|
|
fb0d8cd151 | ||
|
|
c5e3ec8b1f | ||
|
|
e81ecae3b5 | ||
|
|
ce7461a783 | ||
|
|
f141f806e9 | ||
|
|
3968263bf6 | ||
|
|
9de9d2f65b | ||
|
|
163242bd6e | ||
|
|
35bc158387 | ||
|
|
39fe4371fa | ||
|
|
2346544e6f | ||
|
|
8acb4da6aa | ||
|
|
837954729f | ||
|
|
23f6c92f33 | ||
|
|
a661536eb6 | ||
|
|
080c37046f | ||
|
|
f56ff16564 | ||
|
|
1a12ddbd2d | ||
|
|
f43887dd4e | ||
|
|
a520a7ef28 | ||
|
|
c3018f95ee | ||
|
|
164d5fc7a4 | ||
|
|
38b50079e1 | ||
|
|
e67a19cb36 | ||
|
|
7e1cb5414c | ||
|
|
5c5de3e683 | ||
|
|
ca0e023c43 | ||
|
|
70a3868168 | ||
|
|
f168a44fcb | ||
|
|
c959d6c905 | ||
|
|
4cfe58651a | ||
|
|
a56cd8ffb6 | ||
|
|
5cf05b6515 | ||
|
|
fc5da74998 | ||
|
|
0bc23d8bdc | ||
|
|
5fc0bef4fc | ||
|
|
0184f008f1 | ||
|
|
e8dcfcd7ae | ||
|
|
bfd848089f | ||
|
|
887dd3fcd9 | ||
|
|
96daaa359d | ||
|
|
1f5729644b | ||
|
|
aab5bcf05a | ||
|
|
0ae46519ba | ||
|
|
270a19504b | ||
|
|
6de5f58026 | ||
|
|
33b1425add | ||
|
|
071c323b95 | ||
|
|
28de7da0cc | ||
|
|
791776cece | ||
|
|
9ba102c2bb | ||
|
|
c5d62294b2 | ||
|
|
189baa96db | ||
|
|
4b17d9b92e | ||
|
|
21ca17a157 | ||
|
|
5b45ffb353 | ||
|
|
143faeee0e | ||
|
|
b4df99376d | ||
|
|
b2c2958acc | ||
|
|
881571c4ba | ||
|
|
aef6d4a40a | ||
|
|
760294b402 | ||
|
|
417760e829 | ||
|
|
e0b418f288 | ||
|
|
1fe8387d23 | ||
|
|
d956da95c3 | ||
|
|
4ed963dda1 | ||
|
|
cefc1dc808 | ||
|
|
af02dcbd2e | ||
|
|
1800684ac1 | ||
|
|
82cfa68e69 | ||
|
|
27074d5bec | ||
|
|
9fe315b953 | ||
|
|
592d97137a | ||
|
|
9abee2e851 | ||
|
|
e5da747d6f | ||
|
|
f463666f0e | ||
|
|
6c12630e95 | ||
|
|
53cea629b7 | ||
|
|
cfea21319f | ||
|
|
e2b5179060 | ||
|
|
4e5ea86b20 | ||
|
|
e69c8338a4 | ||
|
|
15e0e15c90 | ||
|
|
316b620a05 | ||
|
|
153206be00 | ||
|
|
f677855e7d | ||
|
|
d1fe67b0bc | ||
|
|
fd911e7b2e | ||
|
|
5f59821433 | ||
|
|
b57d24a5e1 | ||
|
|
8e1de16193 | ||
|
|
7fdfdea0c7 | ||
|
|
4bd537574b | ||
|
|
8c50b44bfe | ||
|
|
450a3074e2 | ||
|
|
7f2e204e20 | ||
|
|
bf7512e25a | ||
|
|
aaf6b1fd29 | ||
|
|
30c2d2765c | ||
|
|
c03979c74b | ||
|
|
8af1802bd9 | ||
|
|
abbdf24083 | ||
|
|
93cd84c922 | ||
|
|
7daab390b3 | ||
|
|
916b21bfb4 | ||
|
|
977c89cee1 | ||
|
|
9225f4f27f | ||
|
|
ffc40a0fdb | ||
|
|
241b4624bc | ||
|
|
a51a81655a | ||
|
|
db1e32623f | ||
|
|
6e86daf470 | ||
|
|
25c1ef1e57 | ||
|
|
8e60b3932c | ||
|
|
8227a221e6 | ||
|
|
941eb8d297 | ||
|
|
e89b79d074 | ||
|
|
0fee3095e1 | ||
|
|
52e538e1d9 | ||
|
|
d4f05b0362 | ||
|
|
9728ddb0cd | ||
|
|
4924a9a28d | ||
|
|
4fb99912f1 | ||
|
|
7ee8fd5926 | ||
|
|
5a0f4970df | ||
|
|
fa7407093e | ||
|
|
1596191b27 | ||
|
|
deae0c48e4 | ||
|
|
a5a16ed5b3 | ||
|
|
c4429ad6ed | ||
|
|
918de81f5a | ||
|
|
8b2ed09832 | ||
|
|
4f5be6c17e | ||
|
|
cf0c4523f3 | ||
|
|
e31f8da021 | ||
|
|
2abbcaa49c | ||
|
|
24f9ec7dc0 | ||
|
|
d417d49690 | ||
|
|
bb510c7979 | ||
|
|
56a2cfd19d | ||
|
|
4b7836f822 | ||
|
|
a01c36da45 | ||
|
|
7a25f416b8 | ||
|
|
400a9f3df7 | ||
|
|
699f575976 | ||
|
|
2ea7703d69 | ||
|
|
679cd4b60d | ||
|
|
22054403e8 | ||
|
|
a78462988b | ||
|
|
a0147a1f07 | ||
|
|
19faeab84d | ||
|
|
42a230eef1 | ||
|
|
8d14f16246 | ||
|
|
dfecbbeeee | ||
|
|
1b343434c3 | ||
|
|
d9cab51fd7 | ||
|
|
3f1ebfd4fb | ||
|
|
eab148288a | ||
|
|
17d9960424 | ||
|
|
b37a1ec12b | ||
|
|
9515998deb | ||
|
|
2d0ff51185 | ||
|
|
7b365f4c43 | ||
|
|
7d50fd3b8e | ||
|
|
0e95f580a0 | ||
|
|
a70897c356 | ||
|
|
6fe89a4220 | ||
|
|
bc039524da | ||
|
|
77afad229c | ||
|
|
2214f34380 | ||
|
|
59415e6fc8 | ||
|
|
31d54da198 | ||
|
|
d6d40919b2 | ||
|
|
e7b0ef719d | ||
|
|
266453ce24 | ||
|
|
81f9a5b732 | ||
|
|
25a75ff9bf | ||
|
|
650a668a9d | ||
|
|
db2565cb53 | ||
|
|
63dca0c6cc | ||
|
|
51feed16db | ||
|
|
af1e62b0ac | ||
|
|
87b014ed13 | ||
|
|
d0136899a2 | ||
|
|
90b86a2266 | ||
|
|
67717c569e | ||
|
|
cb9f174a7f | ||
|
|
f0ec6a994c | ||
|
|
384ff70b0d | ||
|
|
9c1d6da111 | ||
|
|
0c49ee939b | ||
|
|
086dc66653 | ||
|
|
608d20446d | ||
|
|
b7a43bf515 | ||
|
|
ae627150b2 | ||
|
|
9f271fa388 | ||
|
|
43ceff4193 | ||
|
|
777271da18 | ||
|
|
de9fc038f7 | ||
|
|
b192e3492c | ||
|
|
f64ca7bc6a | ||
|
|
a077b2b0ee | ||
|
|
b6be1f3520 | ||
|
|
94435778ba | ||
|
|
f8e23b4fbe | ||
|
|
b1fda5dd64 | ||
|
|
5fbbc5fa68 | ||
|
|
494946d020 | ||
|
|
7c5a417bb3 | ||
|
|
beab39eb3c | ||
|
|
100eb5014a | ||
|
|
a5bf7f458d | ||
|
|
b961cfa8d6 | ||
|
|
07157306b2 | ||
|
|
3aa93ca79d | ||
|
|
b7926eb6c8 | ||
|
|
1f6545c1c7 | ||
|
|
677a862fb9 | ||
|
|
93dc500f41 | ||
|
|
ecce657509 | ||
|
|
30f6fd8dc0 | ||
|
|
a5ea566bdf | ||
|
|
5dc440ea92 | ||
|
|
d9b7bedaaa | ||
|
|
00753d49da | ||
|
|
5573ccee53 | ||
|
|
b721f8038f | ||
|
|
08371954bb | ||
|
|
3e4bf1e400 | ||
|
|
cd58ae23be | ||
|
|
184af723e4 | ||
|
|
c2aab98920 | ||
|
|
6ee7b5ad91 | ||
|
|
02e4d4e546 | ||
|
|
4751881fd1 | ||
|
|
3f28ef89cc | ||
|
|
ca62819261 | ||
|
|
4bfa61c149 | ||
|
|
2fa9aff206 | ||
|
|
e98e27ac3f | ||
|
|
86958669cf | ||
|
|
0a050785e9 | ||
|
|
8f5b847cb0 | ||
|
|
cbde650eed | ||
|
|
b18ab062cf | ||
|
|
4094e59ab8 | ||
|
|
0869d22fcb | ||
|
|
be38400ce2 | ||
|
|
2ad47c8ab2 | ||
|
|
83d55daae5 | ||
|
|
0ddbb92ae3 | ||
|
|
54d06fca79 | ||
|
|
457daf4be6 | ||
|
|
c6d7557484 | ||
|
|
e7f1f1af30 | ||
|
|
7e01a1252a | ||
|
|
d07b200b67 | ||
|
|
663f55825d | ||
|
|
dc214edc9c | ||
|
|
36df918e7d | ||
|
|
874364bf4d | ||
|
|
f12ececee5 | ||
|
|
1313bc99a6 | ||
|
|
e45bc8440b | ||
|
|
16692b802b | ||
|
|
fd66f55e3c | ||
|
|
4b66364b06 | ||
|
|
692f1d0e75 | ||
|
|
2f796e1119 | ||
|
|
ac459df2b6 | ||
|
|
d8e566d554 | ||
|
|
cc0e7c87c1 | ||
|
|
757a2db741 | ||
|
|
8409ec0f21 | ||
|
|
0d3a4d799a | ||
|
|
578d06a86a | ||
|
|
e656b4659a | ||
|
|
3b273e263b | ||
|
|
24245be85a | ||
|
|
71434d8b9d | ||
|
|
2518a55105 | ||
|
|
637a95c67f | ||
|
|
7e00852278 | ||
|
|
9b9691233a | ||
|
|
0405ed6e9b | ||
|
|
60997a7bc3 | ||
|
|
5479f26aa6 | ||
|
|
8a7343f97b | ||
|
|
5b26df8f64 | ||
|
|
83e66ce03e | ||
|
|
cc78d5db36 | ||
|
|
8f8205af88 | ||
|
|
13d870157f | ||
|
|
5d42185698 | ||
|
|
3ffc7f4281 | ||
|
|
7d250c4bea | ||
|
|
edee67d96f | ||
|
|
fc1e25c5e1 | ||
|
|
bb071d03ba | ||
|
|
0298823e28 | ||
|
|
51d2c05022 | ||
|
|
3762e7319d | ||
|
|
2f19826470 | ||
|
|
0f9cd73f58 | ||
|
|
17c16ac2fc | ||
|
|
0376a62458 | ||
|
|
d4c13b0f8f | ||
|
|
2c7c749986 | ||
|
|
9e0755ca3a | ||
|
|
0f26931628 | ||
|
|
a0c7209dc1 | ||
|
|
46d7ba9f78 | ||
|
|
31c24cd0d5 | ||
|
|
66b451a70d | ||
|
|
6f15d8ac76 | ||
|
|
123354e920 | ||
|
|
eaf3f7c17c | ||
|
|
dde5867d15 | ||
|
|
deca34676f | ||
|
|
7971985a06 | ||
|
|
ebb20eb8f8 | ||
|
|
045a5e80cb | ||
|
|
01c9780577 | ||
|
|
5ffde1d51c | ||
|
|
4453653418 | ||
|
|
f37e9addcd | ||
|
|
c1b3f0e02f | ||
|
|
ddd18351a1 | ||
|
|
ce56cb23fe | ||
|
|
8d2cd4a620 | ||
|
|
9543716d81 | ||
|
|
4ebdc5643e | ||
|
|
93c39464e3 | ||
|
|
cb323b1f55 | ||
|
|
9f72962dd1 | ||
|
|
9e5f3f96dd | ||
|
|
d6a3ffc301 | ||
|
|
4654d66905 | ||
|
|
463c5eacca | ||
|
|
83d6aff03a | ||
|
|
3b46a0bffe | ||
|
|
937b576b5b | ||
|
|
53a7b49619 | ||
|
|
251d86e5ab | ||
|
|
633a4cbd46 | ||
|
|
3e331c7397 | ||
|
|
e97915eef2 | ||
|
|
2d895a9ec3 | ||
|
|
62e788fb89 | ||
|
|
bc83c72ebe | ||
|
|
68115eb1d0 | ||
|
|
6e5dc7374c | ||
|
|
6a5d247d7c | ||
|
|
13d872592e | ||
|
|
5dc1a9f214 | ||
|
|
8dad297526 | ||
|
|
734ae99285 | ||
|
|
b4f3812077 | ||
|
|
f2b19c39a0 | ||
|
|
cb3b5fb716 | ||
|
|
e5dcc43b57 | ||
|
|
daf691fd07 | ||
|
|
3ff63b06bf | ||
|
|
acdb6321d1 | ||
|
|
8611aeff52 | ||
|
|
6c4acfbf83 | ||
|
|
d8e6782f42 | ||
|
|
f33b7c0a58 | ||
|
|
d4065e11c6 | ||
|
|
bb6c39ea7c | ||
|
|
46828eff1b | ||
|
|
5be1da4491 | ||
|
|
29093f13ec | ||
|
|
51ce370412 | ||
|
|
3c822cee58 | ||
|
|
7904e1d504 | ||
|
|
9d9f1ac816 | ||
|
|
bf84cdfcbe | ||
|
|
8071cc60cb | ||
|
|
f5c0bc194f | ||
|
|
1ca184c1ee | ||
|
|
7a93eddf1c | ||
|
|
f7fbfc54b3 | ||
|
|
0bc9dbe6f8 | ||
|
|
3d904d8e65 | ||
|
|
9c1e916c1c | ||
|
|
cd1d6a9178 | ||
|
|
a2396c30b2 | ||
|
|
dea1e12c88 | ||
|
|
17b98f6b1f | ||
|
|
40ee78f6e8 | ||
|
|
34a7df9ea0 | ||
|
|
cb2cdb7875 | ||
|
|
50c031e6aa | ||
|
|
7382a3ca89 | ||
|
|
d4ec0da49a | ||
|
|
2caf449b8b | ||
|
|
76b4e5cc51 | ||
|
|
2ad9164379 | ||
|
|
11a4b5ed69 | ||
|
|
01153b3271 | ||
|
|
1427ddaa59 | ||
|
|
b4a2b8d46c | ||
|
|
c51af2262e | ||
|
|
a13cf43b65 | ||
|
|
0fd749aa2c | ||
|
|
59602f790e | ||
|
|
0d18c32bca | ||
|
|
f0c1c6f8cc | ||
|
|
02151ac649 | ||
|
|
e3f87035b6 | ||
|
|
67534516c7 | ||
|
|
dc1734f0a6 | ||
|
|
2cfac3389a | ||
|
|
63d079cce9 | ||
|
|
a7fe137941 | ||
|
|
021ff1c7da | ||
|
|
762ba27036 | ||
|
|
8e5ccddc13 | ||
|
|
5bb175aede | ||
|
|
92e0dbde03 | ||
|
|
ab82cc5257 | ||
|
|
92018261aa | ||
|
|
c7292aa4b6 | ||
|
|
d7d59a24d1 | ||
|
|
b093929f91 | ||
|
|
c25b7ea898 | ||
|
|
69f7a8f4d1 | ||
|
|
80389911cc | ||
|
|
a2e829c7b9 | ||
|
|
85446f7a96 | ||
|
|
de623f240c | ||
|
|
7796bf3fa0 | ||
|
|
6239198d65 | ||
|
|
d9313cf561 | ||
|
|
ce1500fad3 | ||
|
|
117b0af831 | ||
|
|
2968ae667f | ||
|
|
92b72f186e | ||
|
|
3d67c58436 | ||
|
|
01298287f6 | ||
|
|
9dca54fdc8 | ||
|
|
0df0b9a505 | ||
|
|
c4647a9c1f | ||
|
|
597af9210f | ||
|
|
986e8fd6c5 | ||
|
|
c75c27c95c | ||
|
|
f1f831edef | ||
|
|
60f37e8c88 | ||
|
|
0c6e3188ac | ||
|
|
08261af4ab | ||
|
|
62e4177e44 | ||
|
|
74506a2a83 | ||
|
|
5fcfce18fd | ||
|
|
c07c4629ff | ||
|
|
3894ceebc9 | ||
|
|
b35a0b1b40 | ||
|
|
543a797d1a | ||
|
|
08c67302fc | ||
|
|
33cb61afb9 | ||
|
|
43ae15a887 | ||
|
|
a735dc027d | ||
|
|
5be9f4dfef | ||
|
|
d75234b675 | ||
|
|
3779e645b8 | ||
|
|
7f4799a4a2 | ||
|
|
a09d4ffb4e | ||
|
|
0c9b4bc3d2 | ||
|
|
778af070f0 | ||
|
|
bd815fbada | ||
|
|
1c22af8ef4 | ||
|
|
9687d91d72 | ||
|
|
a14d228e8b | ||
|
|
b64d2393ea | ||
|
|
d0173a486f | ||
|
|
215379cc27 | ||
|
|
a7cc58d42c | ||
|
|
430b8ca362 | ||
|
|
e6f6de406d | ||
|
|
ad95719a1d | ||
|
|
59bfa6bcd3 | ||
|
|
c7acda0062 | ||
|
|
51244abee9 | ||
|
|
eb1c9c1583 | ||
|
|
c227e7ab13 | ||
|
|
987c067f68 | ||
|
|
26a16dc5bb | ||
|
|
e41f9aad38 | ||
|
|
b6ea2a46d1 | ||
|
|
a2f0588004 | ||
|
|
b31cd189a7 | ||
|
|
3ba58d85e2 | ||
|
|
c6842605ac | ||
|
|
4d10cdb7e8 | ||
|
|
1bffa46d4d | ||
|
|
6e420c3556 | ||
|
|
a476f0fa59 | ||
|
|
6fb13c0fc5 | ||
|
|
c2291f7eb3 | ||
|
|
925a99a043 | ||
|
|
883b96aeb5 | ||
|
|
9204bd6204 | ||
|
|
b482c71b43 | ||
|
|
ff2874333d | ||
|
|
7214a438dc | ||
|
|
06ba4d5c28 | ||
|
|
1c3979dcfa | ||
|
|
f0925e1823 | ||
|
|
ba87413eeb | ||
|
|
a4cbdba388 | ||
|
|
70388da974 | ||
|
|
f0df4b653d | ||
|
|
507e06b5d2 | ||
|
|
c8f8b6957d | ||
|
|
10ac24874b | ||
|
|
8654197c56 | ||
|
|
b332c1055c | ||
|
|
7e0d8fce89 | ||
|
|
c56abe4247 | ||
|
|
ee02623171 | ||
|
|
6fa6b8a903 | ||
|
|
14e99a1a5d | ||
|
|
e2468c8928 | ||
|
|
b4df535e8d | ||
|
|
af4788fdef | ||
|
|
a2bdbfcd24 | ||
|
|
3fb7fdfc58 | ||
|
|
7e87c208ba | ||
|
|
f8ae2ef8b4 | ||
|
|
b238a9a457 | ||
|
|
8af3743e91 | ||
|
|
707a099b69 | ||
|
|
2b680ae7b8 | ||
|
|
6654d53611 | ||
|
|
9b6b1a277d | ||
|
|
a08d6e790d | ||
|
|
00feccde34 | ||
|
|
2c567edc1d | ||
|
|
51293f3750 | ||
|
|
b7024010d3 | ||
|
|
44258a7cce | ||
|
|
0f25d3b219 | ||
|
|
79a462c084 | ||
|
|
b307d6e766 | ||
|
|
abdecd2a9b | ||
|
|
3c3b18d858 | ||
|
|
8b49790784 | ||
|
|
abad24265e | ||
|
|
4095666013 | ||
|
|
877393e019 | ||
|
|
0d29bc00ec | ||
|
|
b203418720 | ||
|
|
3e3e387a45 | ||
|
|
04339fbe9a | ||
|
|
560b6432cc | ||
|
|
bea8936e02 | ||
|
|
95e8c4a615 | ||
|
|
c92db8a22a | ||
|
|
afecd70bc8 | ||
|
|
9296a76658 | ||
|
|
e6e569e07c | ||
|
|
ca9b2fe6ad | ||
|
|
70f0a725cc | ||
|
|
fd80e34037 | ||
|
|
7fc78b8b0f | ||
|
|
1039caabed | ||
|
|
5edb0fe96a | ||
|
|
2860f2f70a | ||
|
|
85b8a41ccb | ||
|
|
6e5d6ebf99 | ||
|
|
177e2e2672 | ||
|
|
72b133a875 | ||
|
|
dc0836804f | ||
|
|
a38beeaed2 | ||
|
|
1c61f0420a | ||
|
|
00b244853e | ||
|
|
1a030aa417 | ||
|
|
c28f1c0b42 | ||
|
|
a20c9b2d8e | ||
|
|
88a58abcf8 | ||
|
|
e16f0b9f12 | ||
|
|
4d0612abfc | ||
|
|
408076adf0 | ||
|
|
dde6d00ab9 | ||
|
|
4c64a0fab2 | ||
|
|
01a4844ac4 | ||
|
|
2ff132a62f | ||
|
|
f7424e1fea | ||
|
|
ab31256ee7 | ||
|
|
21e6679056 | ||
|
|
8d02e2cc52 | ||
|
|
39beafc99a | ||
|
|
fff929d5ab | ||
|
|
51619fdb00 | ||
|
|
7db1c69945 | ||
|
|
11a4f5e25d | ||
|
|
5590cad1ef | ||
|
|
09486c082c | ||
|
|
dab46296b6 | ||
|
|
7f314947dd | ||
|
|
56e4203450 | ||
|
|
b0990aa1fa | ||
|
|
e29d9de2dd | ||
|
|
7acc082fba | ||
|
|
362cdc5437 | ||
|
|
e1bce8c577 | ||
|
|
7760625b3f | ||
|
|
77c2332df6 | ||
|
|
8773c183b5 | ||
|
|
2daea9e4b4 | ||
|
|
abf847ce82 | ||
|
|
8d99d33128 | ||
|
|
a42a874faa | ||
|
|
be62635154 | ||
|
|
022d59f4a5 | ||
|
|
605d72133b | ||
|
|
b2d1d07d2e | ||
|
|
908055e37c | ||
|
|
90352d7223 | ||
|
|
d77a8f7aa1 | ||
|
|
364e4e03ef | ||
|
|
373d2ccf9f | ||
|
|
0a8df434d2 | ||
|
|
15be91b585 | ||
|
|
bfa67bb275 |
1
.flake8
1
.flake8
@@ -29,6 +29,7 @@ max-line-length = 99
|
||||
#
|
||||
per-file-ignores =
|
||||
var/spack/repos/*/package.py:F403,F405,F821
|
||||
*-ci-package.py:F403,F405,F821
|
||||
|
||||
# exclude things we usually do not want linting for.
|
||||
# These still get linted when passed explicitly, as when spack flake8 passes
|
||||
|
||||
62
.github/ISSUE_TEMPLATE/test_error.yml
vendored
Normal file
62
.github/ISSUE_TEMPLATE/test_error.yml
vendored
Normal file
@@ -0,0 +1,62 @@
|
||||
name: "\U0001F4A5 Tests error"
|
||||
description: Some package in Spack had stand-alone tests that didn't pass
|
||||
title: "Testing issue: "
|
||||
labels: [test-error]
|
||||
body:
|
||||
- type: textarea
|
||||
id: reproduce
|
||||
attributes:
|
||||
label: Steps to reproduce the failure(s) or link(s) to test output(s)
|
||||
description: |
|
||||
Fill in the test output from the exact spec that is having stand-alone test failures. Links to test outputs (e.g., CDash) can also be provided.
|
||||
value: |
|
||||
```console
|
||||
$ spack spec -I <spec>
|
||||
...
|
||||
```
|
||||
- type: textarea
|
||||
id: error
|
||||
attributes:
|
||||
label: Error message
|
||||
description: |
|
||||
Please post the error message from spack inside the `<details>` tag below:
|
||||
value: |
|
||||
<details><summary>Error message</summary><pre>
|
||||
...
|
||||
</pre></details>
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: information
|
||||
attributes:
|
||||
label: Information on your system or the test runner
|
||||
description: Please include the output of `spack debug report` for your system.
|
||||
validations:
|
||||
required: true
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
If you have any relevant configuration detail (custom `packages.yaml` or `modules.yaml`, etc.) you can add that here as well.
|
||||
- type: textarea
|
||||
id: additional_information
|
||||
attributes:
|
||||
label: Additional information
|
||||
description: |
|
||||
Please upload test logs or any additional information about the problem.
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Some packages have maintainers who have volunteered to debug build failures. Run `spack maintainers <name-of-the-package>` and **@mention** them here if they exist.
|
||||
- type: checkboxes
|
||||
id: checks
|
||||
attributes:
|
||||
label: General information
|
||||
options:
|
||||
- label: I have reported the version of Spack/Python/Platform/Runner
|
||||
required: true
|
||||
- label: I have run `spack maintainers <name-of-the-package>` and **@mentioned** any maintainers
|
||||
required: true
|
||||
- label: I have uploaded any available logs
|
||||
required: true
|
||||
- label: I have searched the issues of this repo and believe this is not a duplicate
|
||||
required: true
|
||||
44
.github/workflows/audit.yaml
vendored
Normal file
44
.github/workflows/audit.yaml
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
name: audit
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
with_coverage:
|
||||
required: true
|
||||
type: string
|
||||
python_version:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
concurrency:
|
||||
group: audit-${{inputs.python_version}}-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# Run audits on all the packages in the built-in repository
|
||||
package-audits:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
|
||||
- uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
|
||||
with:
|
||||
python-version: ${{inputs.python_version}}
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip six setuptools pytest codecov 'coverage[toml]<=6.2'
|
||||
- name: Package audits (with coverage)
|
||||
if: ${{ inputs.with_coverage == 'true' }}
|
||||
run: |
|
||||
. share/spack/setup-env.sh
|
||||
coverage run $(which spack) audit packages
|
||||
coverage combine
|
||||
coverage xml
|
||||
- name: Package audits (without coverage)
|
||||
if: ${{ inputs.with_coverage == 'false' }}
|
||||
run: |
|
||||
. share/spack/setup-env.sh
|
||||
$(which spack) audit packages
|
||||
- uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # @v2.1.0
|
||||
if: ${{ inputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: unittests,linux,audits
|
||||
7
.github/workflows/bootstrap-test.sh
vendored
Executable file
7
.github/workflows/bootstrap-test.sh
vendored
Executable file
@@ -0,0 +1,7 @@
|
||||
#!/bin/bash
|
||||
set -ex
|
||||
source share/spack/setup-env.sh
|
||||
$PYTHON bin/spack bootstrap untrust spack-install
|
||||
$PYTHON bin/spack -d solve zlib
|
||||
tree $BOOTSTRAP/store
|
||||
exit 0
|
||||
126
.github/workflows/bootstrap.yml
vendored
126
.github/workflows/bootstrap.yml
vendored
@@ -3,33 +3,19 @@ name: Bootstrapping
|
||||
on:
|
||||
# This Workflow can be triggered manually
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
branches:
|
||||
- develop
|
||||
- releases/**
|
||||
paths-ignore:
|
||||
# Don't run if we only modified packages in the
|
||||
# built-in repository or documentation
|
||||
- 'var/spack/repos/builtin/**'
|
||||
- '!var/spack/repos/builtin/packages/clingo-bootstrap/**'
|
||||
- '!var/spack/repos/builtin/packages/clingo/**'
|
||||
- '!var/spack/repos/builtin/packages/python/**'
|
||||
- '!var/spack/repos/builtin/packages/re2c/**'
|
||||
- 'lib/spack/docs/**'
|
||||
workflow_call:
|
||||
schedule:
|
||||
# nightly at 2:16 AM
|
||||
- cron: '16 2 * * *'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
|
||||
group: bootstrap-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
|
||||
fedora-clingo-sources:
|
||||
runs-on: ubuntu-latest
|
||||
container: "fedora:latest"
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
@@ -38,7 +24,9 @@ jobs:
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison bison-devel libstdc++-static
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# See [1] below
|
||||
@@ -49,7 +37,6 @@ jobs:
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
- name: Bootstrap clingo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
@@ -63,7 +50,6 @@ jobs:
|
||||
ubuntu-clingo-sources:
|
||||
runs-on: ubuntu-latest
|
||||
container: "ubuntu:latest"
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
env:
|
||||
@@ -75,7 +61,9 @@ jobs:
|
||||
make patch unzip xz-utils python3 python3-dev tree \
|
||||
cmake bison
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# See [1] below
|
||||
@@ -86,7 +74,6 @@ jobs:
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
- name: Bootstrap clingo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
@@ -100,7 +87,6 @@ jobs:
|
||||
ubuntu-clingo-binaries-and-patchelf:
|
||||
runs-on: ubuntu-latest
|
||||
container: "ubuntu:latest"
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
env:
|
||||
@@ -111,7 +97,9 @@ jobs:
|
||||
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# See [1] below
|
||||
@@ -122,7 +110,6 @@ jobs:
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
- name: Bootstrap clingo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
@@ -134,7 +121,6 @@ jobs:
|
||||
opensuse-clingo-sources:
|
||||
runs-on: ubuntu-latest
|
||||
container: "opensuse/leap:latest"
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
@@ -145,13 +131,14 @@ jobs:
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup repo
|
||||
run: |
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
@@ -163,13 +150,12 @@ jobs:
|
||||
|
||||
macos-clingo-sources:
|
||||
runs-on: macos-latest
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
brew install cmake bison@2.7 tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
@@ -183,53 +169,70 @@ jobs:
|
||||
runs-on: ${{ matrix.macos-version }}
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ['3.6', '3.7', '3.8', '3.9', '3.10']
|
||||
macos-version: ['macos-11', 'macos-12']
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
brew install tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- uses: actions/setup-python@b55428b1882923874294fa556849718a1d7f2ca5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack bootstrap untrust spack-install
|
||||
spack -d solve zlib
|
||||
tree ~/.spack/bootstrap/store/
|
||||
set -ex
|
||||
for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
|
||||
not_found=1
|
||||
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
|
||||
echo "Testing $ver_dir"
|
||||
if [[ -d "$ver_dir" ]] ; then
|
||||
if $ver_dir/python --version ; then
|
||||
export PYTHON="$ver_dir/python"
|
||||
not_found=0
|
||||
old_path="$PATH"
|
||||
export PATH="$ver_dir:$PATH"
|
||||
./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
|
||||
export PATH="$old_path"
|
||||
fi
|
||||
fi
|
||||
# NOTE: test all pythons that exist, not all do on 12
|
||||
done
|
||||
|
||||
ubuntu-clingo-binaries:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ['2.7', '3.6', '3.7', '3.8', '3.9', '3.10']
|
||||
if: github.repository == 'spack/spack'
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- uses: actions/setup-python@b55428b1882923874294fa556849718a1d7f2ca5
|
||||
uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
fetch-depth: 0
|
||||
- name: Setup repo
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack bootstrap untrust spack-install
|
||||
spack -d solve zlib
|
||||
tree ~/.spack/bootstrap/store/
|
||||
set -ex
|
||||
for ver in '2.7' '3.6' '3.7' '3.8' '3.9' '3.10' ; do
|
||||
not_found=1
|
||||
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
|
||||
echo "Testing $ver_dir"
|
||||
if [[ -d "$ver_dir" ]] ; then
|
||||
if $ver_dir/python --version ; then
|
||||
export PYTHON="$ver_dir/python"
|
||||
not_found=0
|
||||
old_path="$PATH"
|
||||
export PATH="$ver_dir:$PATH"
|
||||
./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
|
||||
export PATH="$old_path"
|
||||
fi
|
||||
fi
|
||||
if (($not_found)) ; then
|
||||
echo Required python version $ver not found in runner!
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
ubuntu-gnupg-binaries:
|
||||
runs-on: ubuntu-latest
|
||||
container: "ubuntu:latest"
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
env:
|
||||
@@ -240,7 +243,9 @@ jobs:
|
||||
bzip2 curl file g++ gcc patchelf gfortran git gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# See [1] below
|
||||
@@ -251,7 +256,6 @@ jobs:
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
- name: Bootstrap GnuPG
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
@@ -264,7 +268,6 @@ jobs:
|
||||
ubuntu-gnupg-sources:
|
||||
runs-on: ubuntu-latest
|
||||
container: "ubuntu:latest"
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
env:
|
||||
@@ -276,7 +279,9 @@ jobs:
|
||||
make patch unzip xz-utils python3 python3-dev tree \
|
||||
gawk
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# See [1] below
|
||||
@@ -287,7 +292,6 @@ jobs:
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
- name: Bootstrap GnuPG
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
@@ -300,7 +304,6 @@ jobs:
|
||||
|
||||
macos-gnupg-binaries:
|
||||
runs-on: macos-latest
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
@@ -308,7 +311,7 @@ jobs:
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
@@ -318,7 +321,6 @@ jobs:
|
||||
|
||||
macos-gnupg-sources:
|
||||
runs-on: macos-latest
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
@@ -326,7 +328,7 @@ jobs:
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
|
||||
10
.github/workflows/build-containers.yml
vendored
10
.github/workflows/build-containers.yml
vendored
@@ -20,7 +20,7 @@ on:
|
||||
types: [published]
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
|
||||
group: build_containers-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
@@ -50,7 +50,7 @@ jobs:
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
|
||||
|
||||
- name: Set Container Tag Normal (Nightly)
|
||||
run: |
|
||||
@@ -89,10 +89,10 @@ jobs:
|
||||
uses: docker/setup-qemu-action@8b122486cedac8393e77aa9734c3528886e4a1a8 # @v1
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@dc7b9719a96d48369863986a06765841d7ea23f6 # @v1
|
||||
uses: docker/setup-buildx-action@c74574e6c82eeedc46366be1b0d287eff9085eb6 # @v1
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
|
||||
uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
@@ -100,7 +100,7 @@ jobs:
|
||||
|
||||
- name: Log in to DockerHub
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
|
||||
uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
92
.github/workflows/ci.yaml
vendored
Normal file
92
.github/workflows/ci.yaml
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
name: ci
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
- releases/**
|
||||
pull_request:
|
||||
branches:
|
||||
- develop
|
||||
- releases/**
|
||||
|
||||
concurrency:
|
||||
group: ci-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
prechecks:
|
||||
needs: [ changes ]
|
||||
uses: ./.github/workflows/valid-style.yml
|
||||
with:
|
||||
with_coverage: ${{ needs.changes.outputs.core }}
|
||||
audit-ancient-python:
|
||||
uses: ./.github/workflows/audit.yaml
|
||||
needs: [ changes ]
|
||||
with:
|
||||
with_coverage: ${{ needs.changes.outputs.core }}
|
||||
python_version: 2.7
|
||||
all-prechecks:
|
||||
needs: [ prechecks ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Success
|
||||
run: "true"
|
||||
# Check which files have been updated by the PR
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
# Set job outputs to values from filter step
|
||||
outputs:
|
||||
bootstrap: ${{ steps.filter.outputs.bootstrap }}
|
||||
core: ${{ steps.filter.outputs.core }}
|
||||
packages: ${{ steps.filter.outputs.packages }}
|
||||
steps:
|
||||
- uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
|
||||
if: ${{ github.event_name == 'push' }}
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# For pull requests it's not necessary to checkout the code
|
||||
- uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721
|
||||
id: filter
|
||||
with:
|
||||
# See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
|
||||
# Don't run if we only modified packages in the
|
||||
# built-in repository or documentation
|
||||
filters: |
|
||||
bootstrap:
|
||||
- 'var/spack/repos/builtin/packages/clingo-bootstrap/**'
|
||||
- 'var/spack/repos/builtin/packages/clingo/**'
|
||||
- 'var/spack/repos/builtin/packages/python/**'
|
||||
- 'var/spack/repos/builtin/packages/re2c/**'
|
||||
- 'lib/spack/**'
|
||||
- 'share/spack/**'
|
||||
- '.github/workflows/bootstrap.yml'
|
||||
- '.github/workflows/ci.yaml'
|
||||
core:
|
||||
- './!(var/**)/**'
|
||||
packages:
|
||||
- 'var/**'
|
||||
# Some links for easier reference:
|
||||
#
|
||||
# "github" context: https://docs.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#github-context
|
||||
# job outputs: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idoutputs
|
||||
# setting environment variables from earlier steps: https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-environment-variable
|
||||
#
|
||||
bootstrap:
|
||||
if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.bootstrap == 'true' }}
|
||||
needs: [ prechecks, changes ]
|
||||
uses: ./.github/workflows/bootstrap.yml
|
||||
unit-tests:
|
||||
if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
|
||||
needs: [ prechecks, changes ]
|
||||
uses: ./.github/workflows/unit_tests.yaml
|
||||
windows:
|
||||
if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
|
||||
needs: [ prechecks ]
|
||||
uses: ./.github/workflows/windows_python.yml
|
||||
all:
|
||||
needs: [ windows, unit-tests, bootstrap, audit-ancient-python ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Success
|
||||
run: "true"
|
||||
243
.github/workflows/unit_tests.yaml
vendored
243
.github/workflows/unit_tests.yaml
vendored
@@ -1,118 +1,46 @@
|
||||
name: linux tests
|
||||
name: unit tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
- releases/**
|
||||
pull_request:
|
||||
branches:
|
||||
- develop
|
||||
- releases/**
|
||||
workflow_dispatch:
|
||||
workflow_call:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
|
||||
group: unit_tests-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# Validate that the code can be run on all the Python versions
|
||||
# supported by Spack
|
||||
validate:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/setup-python@b55428b1882923874294fa556849718a1d7f2ca5 # @v2
|
||||
with:
|
||||
python-version: '3.10'
|
||||
- name: Install Python Packages
|
||||
run: |
|
||||
pip install --upgrade pip
|
||||
pip install --upgrade vermin
|
||||
- name: vermin (Spack's Core)
|
||||
run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
|
||||
- name: vermin (Repositories)
|
||||
run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.6- -vvv var/spack/repos
|
||||
# Run style checks on the files that have been changed
|
||||
style:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@b55428b1882923874294fa556849718a1d7f2ca5 # @v2
|
||||
with:
|
||||
python-version: '3.10'
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip six setuptools types-six
|
||||
- name: Setup git configuration
|
||||
run: |
|
||||
# Need this for the git tests to succeed.
|
||||
git --version
|
||||
. .github/workflows/setup_git.sh
|
||||
- name: Run style tests
|
||||
run: |
|
||||
share/spack/qa/run-style-tests
|
||||
# Check which files have been updated by the PR
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
# Set job outputs to values from filter step
|
||||
outputs:
|
||||
core: ${{ steps.filter.outputs.core }}
|
||||
packages: ${{ steps.filter.outputs.packages }}
|
||||
with_coverage: ${{ steps.coverage.outputs.with_coverage }}
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
if: ${{ github.event_name == 'push' }}
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# For pull requests it's not necessary to checkout the code
|
||||
- uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721
|
||||
id: filter
|
||||
with:
|
||||
# See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
|
||||
filters: |
|
||||
core:
|
||||
- './!(var/**)/**'
|
||||
packages:
|
||||
- 'var/**'
|
||||
# Some links for easier reference:
|
||||
#
|
||||
# "github" context: https://docs.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#github-context
|
||||
# job outputs: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idoutputs
|
||||
# setting environment variables from earlier steps: https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-environment-variable
|
||||
#
|
||||
- id: coverage
|
||||
# Run the subsequent jobs with coverage if core has been modified,
|
||||
# regardless of whether this is a pull request or a push to a branch
|
||||
run: |
|
||||
echo Core changes: ${{ steps.filter.outputs.core }}
|
||||
echo Event name: ${{ github.event_name }}
|
||||
if [ "${{ steps.filter.outputs.core }}" == "true" ]
|
||||
then
|
||||
echo "::set-output name=with_coverage::true"
|
||||
else
|
||||
echo "::set-output name=with_coverage::false"
|
||||
fi
|
||||
|
||||
# Run unit tests with different configurations on linux
|
||||
unittests:
|
||||
needs: [ validate, style, changes ]
|
||||
ubuntu:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ['2.7', '3.6', '3.7', '3.8', '3.9', '3.10']
|
||||
concretizer: ['clingo']
|
||||
on_develop:
|
||||
- ${{ github.ref == 'refs/heads/develop' }}
|
||||
include:
|
||||
- python-version: 2.7
|
||||
concretizer: original
|
||||
- python-version: 3.9
|
||||
on_develop: ${{ github.ref == 'refs/heads/develop' }}
|
||||
- python-version: '3.10'
|
||||
concretizer: original
|
||||
on_develop: ${{ github.ref == 'refs/heads/develop' }}
|
||||
exclude:
|
||||
- python-version: '3.7'
|
||||
concretizer: 'clingo'
|
||||
on_develop: false
|
||||
- python-version: '3.8'
|
||||
concretizer: 'clingo'
|
||||
on_develop: false
|
||||
- python-version: '3.9'
|
||||
concretizer: 'clingo'
|
||||
on_develop: false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@b55428b1882923874294fa556849718a1d7f2ca5 # @v2
|
||||
- uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install System packages
|
||||
@@ -124,7 +52,7 @@ jobs:
|
||||
patchelf cmake bison libbison-dev kcov
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip six setuptools pytest codecov "coverage[toml]<=6.2"
|
||||
pip install --upgrade pip six setuptools pytest codecov[toml] pytest-cov pytest-xdist
|
||||
# ensure style checks are not skipped in unit tests for python >= 3.6
|
||||
# note that true/false (i.e., 1/0) are opposite in conditions in python and bash
|
||||
if python -c 'import sys; sys.exit(not sys.version_info >= (3, 6))'; then
|
||||
@@ -147,37 +75,28 @@ jobs:
|
||||
. share/spack/setup-env.sh
|
||||
spack bootstrap untrust spack-install
|
||||
spack -v solve zlib
|
||||
- name: Run unit tests (full suite with coverage)
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
- name: Run unit tests
|
||||
env:
|
||||
SPACK_PYTHON: python
|
||||
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
|
||||
SPACK_TEST_PARALLEL: 2
|
||||
COVERAGE: true
|
||||
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
|
||||
UNIT_TEST_COVERAGE: ${{ (matrix.concretizer == 'original' && matrix.python-version == '2.7') || (matrix.python-version == '3.10') }}
|
||||
run: |
|
||||
share/spack/qa/run-unit-tests
|
||||
coverage combine
|
||||
coverage combine -a
|
||||
coverage xml
|
||||
- name: Run unit tests (reduced suite without coverage)
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'false' }}
|
||||
env:
|
||||
SPACK_PYTHON: python
|
||||
ONLY_PACKAGES: true
|
||||
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
|
||||
run: |
|
||||
share/spack/qa/run-unit-tests
|
||||
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
- uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
|
||||
with:
|
||||
flags: unittests,linux,${{ matrix.concretizer }}
|
||||
# Test shell integration
|
||||
shell:
|
||||
needs: [ validate, style, changes ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@b55428b1882923874294fa556849718a1d7f2ca5 # @v2
|
||||
- uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
|
||||
with:
|
||||
python-version: '3.10'
|
||||
- name: Install System packages
|
||||
@@ -187,33 +106,25 @@ jobs:
|
||||
sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip six setuptools pytest codecov coverage[toml]==6.2
|
||||
pip install --upgrade pip six setuptools pytest codecov coverage[toml]==6.2 pytest-xdist
|
||||
- name: Setup git configuration
|
||||
run: |
|
||||
# Need this for the git tests to succeed.
|
||||
git --version
|
||||
. .github/workflows/setup_git.sh
|
||||
- name: Run shell tests (without coverage)
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'false' }}
|
||||
run: |
|
||||
share/spack/qa/run-shell-tests
|
||||
- name: Run shell tests (with coverage)
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
- name: Run shell tests
|
||||
env:
|
||||
COVERAGE: true
|
||||
run: |
|
||||
share/spack/qa/run-shell-tests
|
||||
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
- uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
|
||||
with:
|
||||
flags: shelltests,linux
|
||||
|
||||
# Test RHEL8 UBI with platform Python. This job is run
|
||||
# only on PRs modifying core Spack
|
||||
rhel8-platform-python:
|
||||
needs: [ validate, style, changes ]
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
container: registry.access.redhat.com/ubi8/ubi
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
@@ -221,7 +132,7 @@ jobs:
|
||||
dnf install -y \
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch tcl unzip which xz
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
@@ -237,13 +148,12 @@ jobs:
|
||||
spack unit-test -k 'not cvs and not svn and not hg' -x --verbose
|
||||
# Test for the clingo based solver (using clingo-cffi)
|
||||
clingo-cffi:
|
||||
needs: [ validate, style, changes ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@b55428b1882923874294fa556849718a1d7f2ca5 # @v2
|
||||
- uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
|
||||
with:
|
||||
python-version: '3.10'
|
||||
- name: Install System packages
|
||||
@@ -255,105 +165,60 @@ jobs:
|
||||
patchelf kcov
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip six setuptools pytest codecov coverage[toml]==6.2 clingo
|
||||
pip install --upgrade pip six setuptools pytest codecov coverage[toml] pytest-cov clingo pytest-xdist
|
||||
- name: Setup git configuration
|
||||
run: |
|
||||
# Need this for the git tests to succeed.
|
||||
git --version
|
||||
. .github/workflows/setup_git.sh
|
||||
- name: Run unit tests (full suite with coverage)
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
env:
|
||||
COVERAGE: true
|
||||
SPACK_TEST_SOLVER: clingo
|
||||
run: |
|
||||
share/spack/qa/run-unit-tests
|
||||
coverage combine
|
||||
coverage combine -a
|
||||
coverage xml
|
||||
- name: Run unit tests (reduced suite without coverage)
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'false' }}
|
||||
env:
|
||||
ONLY_PACKAGES: true
|
||||
SPACK_TEST_SOLVER: clingo
|
||||
run: |
|
||||
share/spack/qa/run-unit-tests
|
||||
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
- uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # @v2.1.0
|
||||
with:
|
||||
flags: unittests,linux,clingo
|
||||
# Run unit tests on MacOS
|
||||
build:
|
||||
needs: [ validate, style, changes ]
|
||||
macos:
|
||||
runs-on: macos-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: [3.8]
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@b55428b1882923874294fa556849718a1d7f2ca5 # @v2
|
||||
- uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip six setuptools
|
||||
pip install --upgrade pytest codecov coverage[toml]==6.2
|
||||
pip install --upgrade pytest codecov coverage[toml] pytest-xdist pytest-cov
|
||||
- name: Setup Homebrew packages
|
||||
run: |
|
||||
brew install dash fish gcc gnupg2 kcov
|
||||
- name: Run unit tests
|
||||
env:
|
||||
SPACK_TEST_SOLVER: clingo
|
||||
SPACK_TEST_PARALLEL: 4
|
||||
run: |
|
||||
git --version
|
||||
. .github/workflows/setup_git.sh
|
||||
. share/spack/setup-env.sh
|
||||
$(which spack) bootstrap untrust spack-install
|
||||
$(which spack) solve zlib
|
||||
if [ "${{ needs.changes.outputs.with_coverage }}" == "true" ]
|
||||
then
|
||||
coverage run $(which spack) unit-test -x
|
||||
coverage combine
|
||||
coverage xml
|
||||
# Delete the symlink going from ./lib/spack/docs/_spack_root back to
|
||||
# the initial directory, since it causes ELOOP errors with codecov/actions@2
|
||||
rm lib/spack/docs/_spack_root
|
||||
else
|
||||
echo "ONLY PACKAGE RECIPES CHANGED [skipping coverage]"
|
||||
$(which spack) unit-test -x -m "not maybeslow" -k "package_sanity"
|
||||
fi
|
||||
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
|
||||
$(which spack) unit-test --cov --cov-config=pyproject.toml "${common_args[@]}"
|
||||
coverage combine -a
|
||||
coverage xml
|
||||
# Delete the symlink going from ./lib/spack/docs/_spack_root back to
|
||||
# the initial directory, since it causes ELOOP errors with codecov/actions@2
|
||||
rm lib/spack/docs/_spack_root
|
||||
- uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
|
||||
with:
|
||||
files: ./coverage.xml
|
||||
flags: unittests,macos
|
||||
|
||||
# Run audits on all the packages in the built-in repository
|
||||
package-audits:
|
||||
needs: [ validate, style, changes ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/setup-python@b55428b1882923874294fa556849718a1d7f2ca5 # @v2
|
||||
with:
|
||||
python-version: '3.10'
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip six setuptools pytest codecov coverage[toml]==6.2
|
||||
- name: Package audits (with coverage)
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
run: |
|
||||
. share/spack/setup-env.sh
|
||||
coverage run $(which spack) audit packages
|
||||
coverage combine
|
||||
coverage xml
|
||||
- name: Package audits (without coverage)
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'false' }}
|
||||
run: |
|
||||
. share/spack/setup-env.sh
|
||||
$(which spack) audit packages
|
||||
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: unittests,linux,audits
|
||||
|
||||
60
.github/workflows/valid-style.yml
vendored
Normal file
60
.github/workflows/valid-style.yml
vendored
Normal file
@@ -0,0 +1,60 @@
|
||||
name: style
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
with_coverage:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
concurrency:
|
||||
group: style-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
|
||||
cancel-in-progress: true
|
||||
|
||||
|
||||
jobs:
|
||||
# Validate that the code can be run on all the Python versions
|
||||
# supported by Spack
|
||||
validate:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
|
||||
- uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
|
||||
with:
|
||||
python-version: '3.10'
|
||||
cache: 'pip'
|
||||
- name: Install Python Packages
|
||||
run: |
|
||||
pip install --upgrade pip
|
||||
pip install --upgrade vermin
|
||||
- name: vermin (Spack's Core)
|
||||
run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
|
||||
- name: vermin (Repositories)
|
||||
run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.6- -vvv var/spack/repos
|
||||
# Run style checks on the files that have been changed
|
||||
style:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
|
||||
with:
|
||||
python-version: '3.10'
|
||||
cache: 'pip'
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python3 -m pip install --upgrade pip six setuptools types-six click==8.0.2 'black==21.12b0' mypy isort clingo flake8
|
||||
- name: Setup git configuration
|
||||
run: |
|
||||
# Need this for the git tests to succeed.
|
||||
git --version
|
||||
. .github/workflows/setup_git.sh
|
||||
- name: Run style tests
|
||||
run: |
|
||||
share/spack/qa/run-style-tests
|
||||
audit:
|
||||
uses: ./.github/workflows/audit.yaml
|
||||
with:
|
||||
with_coverage: ${{ inputs.with_coverage }}
|
||||
python_version: '3.10'
|
||||
109
.github/workflows/windows_python.yml
vendored
109
.github/workflows/windows_python.yml
vendored
@@ -1,17 +1,10 @@
|
||||
name: windows tests
|
||||
name: windows
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
- releases/**
|
||||
pull_request:
|
||||
branches:
|
||||
- develop
|
||||
- releases/**
|
||||
workflow_call:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
|
||||
group: windows-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
|
||||
cancel-in-progress: true
|
||||
|
||||
defaults:
|
||||
@@ -19,91 +12,66 @@ defaults:
|
||||
shell:
|
||||
powershell Invoke-Expression -Command ".\share\spack\qa\windows_test_setup.ps1"; {0}
|
||||
jobs:
|
||||
validate:
|
||||
unit-tests:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- uses: actions/setup-python@b55428b1882923874294fa556849718a1d7f2ca5
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python Packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
python -m pip install --upgrade vermin
|
||||
- name: vermin (Spack's Core)
|
||||
run: vermin --backport argparse --backport typing -t='2.7-' -t='3.6-' -v spack/lib/spack/spack/ spack/lib/spack/llnl/ spack/bin/
|
||||
- name: vermin (Repositories)
|
||||
run: vermin --backport argparse --backport typing -t='2.7-' -t='3.6-' -v spack/var/spack/repos
|
||||
# Run style checks on the files that have been changed
|
||||
style:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@b55428b1882923874294fa556849718a1d7f2ca5
|
||||
- uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip six setuptools flake8 "isort>=4.3.5" "mypy>=0.800" "click==8.0.4" "black<=21.12b0" pywin32 types-python-dateutil
|
||||
- name: Create local develop
|
||||
run: |
|
||||
.\spack\.github\workflows\setup_git.ps1
|
||||
- name: Run style tests
|
||||
run: |
|
||||
spack style
|
||||
- name: Verify license headers
|
||||
run: |
|
||||
python spack\bin\spack license verify
|
||||
unittest:
|
||||
needs: [ validate, style ]
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@b55428b1882923874294fa556849718a1d7f2ca5
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
|
||||
python -m pip install --upgrade pip six pywin32 setuptools codecov pytest-cov
|
||||
- name: Create local develop
|
||||
run: |
|
||||
.\spack\.github\workflows\setup_git.ps1
|
||||
- name: Unit Test
|
||||
run: |
|
||||
echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
|
||||
spack unit-test --verbose --ignore=lib/spack/spack/test/cmd
|
||||
unittest-cmd:
|
||||
needs: [ validate, style ]
|
||||
cd spack
|
||||
dir
|
||||
(Get-Item '.\lib\spack\docs\_spack_root').Delete()
|
||||
spack unit-test --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
|
||||
coverage combine -a
|
||||
coverage xml
|
||||
- uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
|
||||
with:
|
||||
flags: unittests,windows
|
||||
unit-tests-cmd:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@b55428b1882923874294fa556849718a1d7f2ca5
|
||||
- uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
|
||||
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage pytest-cov
|
||||
- name: Create local develop
|
||||
run: |
|
||||
.\spack\.github\workflows\setup_git.ps1
|
||||
- name: Command Unit Test
|
||||
run: |
|
||||
echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
|
||||
spack unit-test lib/spack/spack/test/cmd --verbose
|
||||
buildtest:
|
||||
needs: [ validate, style ]
|
||||
cd spack
|
||||
(Get-Item '.\lib\spack\docs\_spack_root').Delete()
|
||||
spack unit-test --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
|
||||
coverage combine -a
|
||||
coverage xml
|
||||
- uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
|
||||
with:
|
||||
flags: unittests,windows
|
||||
build-abseil:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@b55428b1882923874294fa556849718a1d7f2ca5
|
||||
- uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
@@ -116,8 +84,7 @@ jobs:
|
||||
spack external find cmake
|
||||
spack external find ninja
|
||||
spack install abseil-cpp
|
||||
generate-installer-test:
|
||||
needs: [ validate, style ]
|
||||
make-installer:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- name: Disable Windows Symlinks
|
||||
@@ -125,15 +92,15 @@ jobs:
|
||||
git config --global core.symlinks false
|
||||
shell:
|
||||
powershell
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@b55428b1882923874294fa556849718a1d7f2ca5
|
||||
- uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
|
||||
python -m pip install --upgrade pip six pywin32 setuptools
|
||||
- name: Add Light and Candle to Path
|
||||
run: |
|
||||
$env:WIX >> $GITHUB_PATH
|
||||
@@ -153,18 +120,18 @@ jobs:
|
||||
name: Windows Spack Installer
|
||||
path: ${{ env.installer_root}}\pkg\Spack.msi
|
||||
execute-installer:
|
||||
needs: generate-installer-test
|
||||
needs: make-installer
|
||||
runs-on: windows-latest
|
||||
defaults:
|
||||
run:
|
||||
shell: pwsh
|
||||
steps:
|
||||
- uses: actions/setup-python@b55428b1882923874294fa556849718a1d7f2ca5
|
||||
- uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
|
||||
python -m pip install --upgrade pip six pywin32 setuptools
|
||||
- name: Setup installer directory
|
||||
run: |
|
||||
mkdir -p spack_installer
|
||||
|
||||
@@ -62,6 +62,7 @@ Resources:
|
||||
|
||||
* **Slack workspace**: [spackpm.slack.com](https://spackpm.slack.com).
|
||||
To get an invitation, visit [slack.spack.io](https://slack.spack.io).
|
||||
* [**Github Discussions**](https://github.com/spack/spack/discussions): not just for discussions, also Q&A.
|
||||
* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack)
|
||||
* **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
|
||||
`@mention` us!
|
||||
|
||||
95
bin/spack-tmpconfig
Executable file
@@ -0,0 +1,95 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
[[ -n "${TMPCONFIG_DEBUG:=}" ]] && set -x
|
||||
DIR="$(cd -P "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
mkdir -p "${XDG_RUNTIME_DIR:=/tmp}/spack-tests"
|
||||
export TMPDIR="${XDG_RUNTIME_DIR}"
|
||||
export TMP_DIR="$(mktemp -d -t spack-test-XXXXX)"
|
||||
clean_up() {
|
||||
[[ -n "$TMPCONFIG_DEBUG" ]] && printf "cleaning up: $TMP_DIR\n"
|
||||
rm -rf "$TMP_DIR"
|
||||
}
|
||||
trap clean_up EXIT
|
||||
trap clean_up ERR
|
||||
|
||||
[[ -n "$TMPCONFIG_DEBUG" ]] && printf "Redirecting TMP_DIR and spack directories to $TMP_DIR\n"
|
||||
|
||||
export BOOTSTRAP="${SPACK_USER_CACHE_PATH:=$HOME/.spack}/bootstrap"
|
||||
export SPACK_USER_CACHE_PATH="$TMP_DIR/user_cache"
|
||||
mkdir -p "$SPACK_USER_CACHE_PATH"
|
||||
|
||||
private_bootstrap="$SPACK_USER_CACHE_PATH/bootstrap"
|
||||
use_spack=''
|
||||
use_bwrap=''
|
||||
# argument handling
|
||||
while (($# >= 1)) ; do
|
||||
case "$1" in
|
||||
-b) # privatize bootstrap too, useful for CI but not always cheap
|
||||
shift
|
||||
export BOOTSTRAP="$private_bootstrap"
|
||||
;;
|
||||
-B) # use specified bootstrap dir
|
||||
export BOOTSTRAP="$2"
|
||||
shift 2
|
||||
;;
|
||||
-s) # run spack directly with remaining args
|
||||
shift
|
||||
use_spack=1
|
||||
;;
|
||||
--contain=bwrap)
|
||||
if bwrap --help 2>&1 > /dev/null ; then
|
||||
use_bwrap=1
|
||||
else
|
||||
echo Bubblewrap containment requested, but no bwrap command found
|
||||
exit 1
|
||||
fi
|
||||
shift
|
||||
;;
|
||||
--)
|
||||
shift
|
||||
break
|
||||
;;
|
||||
*)
|
||||
break
|
||||
;;
|
||||
esac
|
||||
done
|
||||
typeset -a CMD
|
||||
if [[ -n "$use_spack" ]] ; then
|
||||
CMD=("$DIR/spack" "$@")
|
||||
else
|
||||
CMD=("$@")
|
||||
fi
|
||||
|
||||
mkdir -p "$BOOTSTRAP"
|
||||
|
||||
export SPACK_SYSTEM_CONFIG_PATH="$TMP_DIR/sys_conf"
|
||||
export SPACK_USER_CONFIG_PATH="$TMP_DIR/user_conf"
|
||||
mkdir -p "$SPACK_USER_CONFIG_PATH"
|
||||
cat >"$SPACK_USER_CONFIG_PATH/config.yaml" <<EOF
|
||||
config:
|
||||
install_tree:
|
||||
root: $TMP_DIR/install
|
||||
misc_cache: $$user_cache_path/cache
|
||||
source_cache: $$user_cache_path/source
|
||||
EOF
|
||||
cat >"$SPACK_USER_CONFIG_PATH/bootstrap.yaml" <<EOF
|
||||
bootstrap:
|
||||
root: $BOOTSTRAP
|
||||
EOF
|
||||
|
||||
if [[ -n "$use_bwrap" ]] ; then
|
||||
CMD=(
|
||||
bwrap
|
||||
--dev-bind / /
|
||||
--ro-bind "$DIR/.." "$DIR/.." # do not touch spack root
|
||||
--ro-bind $HOME/.spack $HOME/.spack # do not touch user config/cache dir
|
||||
--bind "$TMP_DIR" "$TMP_DIR"
|
||||
--bind "$BOOTSTRAP" "$BOOTSTRAP"
|
||||
--die-with-parent
|
||||
"${CMD[@]}"
|
||||
)
|
||||
fi
|
||||
|
||||
(( ${TMPCONFIG_DEBUG:=0} > 1)) && echo "Running: ${CMD[@]}"
|
||||
"${CMD[@]}"
|
||||
@@ -9,6 +9,8 @@ bootstrap:
|
||||
# may not be able to bootstrap all the software that Spack needs,
|
||||
# depending on its type.
|
||||
sources:
|
||||
- name: 'github-actions-v0.3'
|
||||
metadata: $spack/share/spack/bootstrap/github-actions-v0.3
|
||||
- name: 'github-actions-v0.2'
|
||||
metadata: $spack/share/spack/bootstrap/github-actions-v0.2
|
||||
- name: 'github-actions-v0.1'
|
||||
@@ -18,5 +20,5 @@ bootstrap:
|
||||
trusted:
|
||||
# By default we trust bootstrapping from sources and from binaries
|
||||
# produced on Github via the workflow
|
||||
github-actions-v0.2: true
|
||||
github-actions-v0.3: true
|
||||
spack-install: true
|
||||
|
||||
@@ -15,7 +15,7 @@
|
||||
# -------------------------------------------------------------------------
|
||||
modules:
|
||||
prefix_inspections:
|
||||
lib:
|
||||
./lib:
|
||||
- DYLD_FALLBACK_LIBRARY_PATH
|
||||
lib64:
|
||||
./lib64:
|
||||
- DYLD_FALLBACK_LIBRARY_PATH
|
||||
|
||||
@@ -14,23 +14,24 @@
|
||||
# ~/.spack/modules.yaml
|
||||
# -------------------------------------------------------------------------
|
||||
modules:
|
||||
# Paths to check when creating modules for all module sets
|
||||
# This maps paths in the package install prefix to environment variables
|
||||
# they should be added to. For example, <prefix>/bin should be in PATH.
|
||||
prefix_inspections:
|
||||
bin:
|
||||
./bin:
|
||||
- PATH
|
||||
man:
|
||||
./man:
|
||||
- MANPATH
|
||||
share/man:
|
||||
./share/man:
|
||||
- MANPATH
|
||||
share/aclocal:
|
||||
./share/aclocal:
|
||||
- ACLOCAL_PATH
|
||||
lib/pkgconfig:
|
||||
./lib/pkgconfig:
|
||||
- PKG_CONFIG_PATH
|
||||
lib64/pkgconfig:
|
||||
./lib64/pkgconfig:
|
||||
- PKG_CONFIG_PATH
|
||||
share/pkgconfig:
|
||||
./share/pkgconfig:
|
||||
- PKG_CONFIG_PATH
|
||||
'':
|
||||
./:
|
||||
- CMAKE_PREFIX_PATH
|
||||
|
||||
# These are configurations for the module set named "default"
|
||||
|
||||
@@ -49,9 +49,8 @@ packages rather than building its own packages. This may be desirable
if machines ship with system packages, such as a customized MPI
that should be used instead of Spack building its own MPI.

External packages are configured through the ``packages.yaml`` file found
in a Spack installation's ``etc/spack/`` or a user's ``~/.spack/``
directory. Here's an example of an external configuration:
External packages are configured through the ``packages.yaml`` file.
Here's an example of an external configuration:

.. code-block:: yaml

@@ -97,11 +96,14 @@ Each package version and compiler listed in an external should
have entries in Spack's packages and compiler configuration, even
though the package and compiler may not ever be built.

The packages configuration can tell Spack to use an external location
for certain package versions, but it does not restrict Spack to using
external packages. In the above example, since newer versions of OpenMPI
are available, Spack will choose to start building and linking with the
latest version rather than continue using the pre-installed OpenMPI versions.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Prevent packages from being built from sources
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Adding an external spec in ``packages.yaml`` allows Spack to use an external location,
but it does not prevent Spack from building packages from sources. In the above example,
Spack might choose for many valid reasons to start building and linking with the
latest version of OpenMPI rather than continue using the pre-installed OpenMPI versions.

To prevent this, the ``packages.yaml`` configuration also allows packages
to be flagged as non-buildable. The previous example could be modified to
@@ -121,9 +123,15 @@ be:
buildable: False

The addition of the ``buildable`` flag tells Spack that it should never build
its own version of OpenMPI, and it will instead always rely on a pre-built
OpenMPI. Similar to ``paths``, ``buildable`` is specified as a property under
a package name.
its own version of OpenMPI from sources, and it will instead always rely on a pre-built
OpenMPI.

.. note::

If ``concretizer:reuse`` is on (see :ref:`concretizer-options` for more information on that flag)
pre-built specs include specs already available from a local store, an upstream store, a registered
buildcache or specs marked as externals in ``packages.yaml``. If ``concretizer:reuse`` is off, only
external specs in ``packages.yaml`` are included in the list of pre-built specs.

If an external module is specified as not buildable, then Spack will load the
external module into the build environment which can be used for linking.
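
One possible shape for such an entry, sketched here with a hypothetical site module name, is:

.. code-block:: yaml

   packages:
     openmpi:
       buildable: False
       externals:
       - spec: "openmpi@4.1.1%gcc@8.3.1"
         modules: [openmpi/4.1.1]   # hypothetical module name provided by the site

With an entry like this, Spack loads the listed module instead of pointing at a ``prefix`` when the external is used.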
|
||||
@@ -132,6 +140,10 @@ The ``buildable`` does not need to be paired with external packages.
It could also be used alone to forbid packages that may be
buggy or otherwise undesirable.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Non-buildable virtual packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Virtual packages in Spack can also be specified as not buildable, and
external implementations can be provided. In the example above,
OpenMPI is configured as not buildable, but Spack will often prefer
||||
@@ -153,21 +165,37 @@ but more conveniently:
- spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
prefix: /opt/openmpi-1.6.5-intel

Implementations can also be listed immediately under the virtual they provide:
Spack can then use any of the listed external implementations of MPI
to satisfy a dependency, and will choose depending on the compiler and
architecture.

In cases where the concretizer is configured to reuse specs, and other ``mpi`` providers
(available via stores or buildcaches) are not wanted, Spack can be configured to require
specs matching only the available externals:

.. code-block:: yaml

packages:
mpi:
buildable: False
openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64: /opt/openmpi-1.4.3
openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug: /opt/openmpi-1.4.3-debug
openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64: /opt/openmpi-1.6.5-intel
mpich@3.3 %clang@9.0.0 arch=linux-debian7-x86_64: /opt/mpich-3.3-intel
require:
- one_of: [
"openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64",
"openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug",
"openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
]
openmpi:
externals:
- spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
prefix: /opt/openmpi-1.4.3
- spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
prefix: /opt/openmpi-1.4.3-debug
- spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
prefix: /opt/openmpi-1.6.5-intel

Spack can then use any of the listed external implementations of MPI
to satisfy a dependency, and will choose depending on the compiler and
architecture.
This configuration prevents any spec using MPI and originating from stores or buildcaches to be reused,
unless it matches the requirements under ``packages:mpi:require``. For more information on requirements see
:ref:`package-requirements`.

.. _cmd-spack-external-find:

@@ -194,11 +222,6 @@ Specific limitations include:
* Packages are not discoverable by default: For a package to be
discoverable with ``spack external find``, it needs to add special
logic. See :ref:`here <make-package-findable>` for more details.
* The current implementation only collects and examines executable files,
so it is typically only useful for build/run dependencies (in some cases
if a library package also provides an executable, it may be possible to
extract a meaningful Spec by running the executable - for example the
compiler wrappers in MPI implementations).
* The logic does not search through module files, it can only detect
packages with executables defined in ``PATH``; you can help Spack locate
externals which use module files by loading any associated modules for
@@ -369,7 +392,7 @@ The following is an example of how to enforce package properties in
require: "@1.13.2"
openmpi:
require:
- any_of: ["~cuda", "gcc"]
- any_of: ["~cuda", "%gcc"]
mpich:
require:
- one_of: ["+cuda", "+rocm"]
@@ -396,15 +419,69 @@ choose between a set of options using ``any_of`` or ``one_of``:
``mpich`` already includes a conflict, so this is redundant but
still demonstrates the concept).

Other notes about ``requires``:
.. note::

* You can only specify requirements for specific packages: you cannot
add ``requires`` under ``all``.
* You cannot specify requirements for virtual packages (e.g. you can
specify requirements for ``openmpi`` but not ``mpi``).
* For ``any_of`` and ``one_of``, the order of specs indicates a
preference: items that appear earlier in the list are preferred
(note that these preferences can be ignored in favor of others).
For ``any_of`` and ``one_of``, the order of specs indicates a
preference: items that appear earlier in the list are preferred
(note that these preferences can be ignored in favor of others).
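
As a minimal illustration of that ordering rule (the package and variants below are only an example), a requirement such as:

.. code-block:: yaml

   packages:
     openmpi:
       require:
       - any_of: ["+cuda", "~cuda"]

asks the concretizer to try ``+cuda`` first and to fall back to ``~cuda`` only when the preferred option cannot be satisfied.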
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting default requirements
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

You can also set default requirements for all packages under ``all``
like this:

.. code-block:: yaml

packages:
all:
require: '%clang'

which means every spec will be required to use ``clang`` as a compiler.

Note that in this case ``all`` represents a *default set of requirements* -
if there are specific package requirements, then the default requirements
under ``all`` are disregarded. For example, with a configuration like this:

.. code-block:: yaml

packages:
all:
require: '%clang'
cmake:
require: '%gcc'

Spack requires ``cmake`` to use ``gcc`` and all other nodes (including cmake dependencies)
to use ``clang``.
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting requirements on virtual specs
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

A requirement on a virtual spec applies whenever that virtual is present in the DAG. This
can be useful for fixing which virtual provider you want to use:

.. code-block:: yaml

packages:
mpi:
require: 'mvapich2 %gcc'

With the configuration above the only allowed ``mpi`` provider is ``mvapich2 %gcc``.

Requirements on the virtual spec and on the specific provider are both applied, if present. For
instance with a configuration like:

.. code-block:: yaml

packages:
mpi:
require: 'mvapich2 %gcc'
mvapich2:
require: '~cuda'

you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.

.. _package_permissions:
|
||||
|
||||
@@ -50,8 +50,9 @@ important to understand.
|
||||
include `setuptools <https://setuptools.pypa.io/>`__,
|
||||
`flit <https://flit.pypa.io/>`_,
|
||||
`poetry <https://python-poetry.org/>`_,
|
||||
`hatchling <https://hatch.pypa.io/latest/>`_, and
|
||||
`meson <https://meson-python.readthedocs.io/>`_.
|
||||
`hatchling <https://hatch.pypa.io/latest/>`_,
|
||||
`meson <https://meson-python.readthedocs.io/>`_, and
|
||||
`pdm <https://pdm.fming.dev/latest/>`_.
|
||||
|
||||
^^^^^^^^^^^
|
||||
Downloading
|
||||
@@ -368,6 +369,16 @@ it uses the meson build system. Meson uses the default
|
||||
See https://meson-python.readthedocs.io/en/latest/usage/start.html
|
||||
for more information.
|
||||
|
||||
"""
|
||||
pdm
|
||||
"""
|
||||
|
||||
If the ``pyproject.toml`` lists ``pdm.pep517.api`` as the ``build-backend``,
|
||||
it uses the PDM build system. PDM uses the default ``pyproject.toml``
|
||||
keys to list dependencies.
|
||||
|
||||
See https://pdm.fming.dev/latest/ for more information.
|
||||
|
||||
""""""
|
||||
wheels
|
||||
""""""
|
||||
@@ -571,6 +582,19 @@ libraries. Make sure not to add modules/packages containing the word
"test", as these likely won't end up in the installation directory,
or may require test dependencies like pytest to be installed.

Instead of defining the ``import_modules`` explicitly, only the subset
of module names to be skipped can be defined by using ``skip_modules``.
If a defined module has submodules, they are skipped as well, e.g.,
in case the ``plotting`` modules should be excluded from the
automatically detected ``import_modules`` ``['nilearn', 'nilearn.surface',
'nilearn.plotting', 'nilearn.plotting.data']`` set:
|
||||
.. code-block:: python
|
||||
|
||||
skip_modules = ['nilearn.plotting']
|
||||
|
||||
This will set ``import_modules`` to ``['nilearn', 'nilearn.surface']``
|
||||
|
||||
Import tests can be run during the installation using ``spack install
|
||||
--test=root`` or at any time after the installation using
|
||||
``spack test run``.
|
||||
@@ -758,3 +782,4 @@ For more information on build backend tools, see:
|
||||
* poetry: https://python-poetry.org/
|
||||
* hatchling: https://hatch.pypa.io/latest/
|
||||
* meson: https://meson-python.readthedocs.io/
|
||||
* pdm: https://pdm.fming.dev/latest/
|
||||
|
||||
@@ -127,6 +127,7 @@ def setup(sphinx):
|
||||
"sphinx.ext.napoleon",
|
||||
"sphinx.ext.todo",
|
||||
"sphinx.ext.viewcode",
|
||||
"sphinx_design",
|
||||
"sphinxcontrib.programoutput",
|
||||
]
|
||||
|
||||
@@ -201,6 +202,7 @@ def setup(sphinx):
|
||||
("py:class", "unittest.case.TestCase"),
|
||||
("py:class", "_frozen_importlib_external.SourceFileLoader"),
|
||||
("py:class", "clingo.Control"),
|
||||
("py:class", "six.moves.urllib.parse.ParseResult"),
|
||||
# Spack classes that are private and we don't want to expose
|
||||
("py:class", "spack.provider_index._IndexBase"),
|
||||
("py:class", "spack.repo._PrependFileLoader"),
|
||||
|
||||
@@ -19,9 +19,9 @@ see the default settings by looking at
|
||||
These settings can be overridden in ``etc/spack/config.yaml`` or
|
||||
``~/.spack/config.yaml``. See :ref:`configuration-scopes` for details.
|
||||
|
||||
--------------------
|
||||
``install_tree``
|
||||
--------------------
|
||||
---------------------
|
||||
``install_tree:root``
|
||||
---------------------
|
||||
|
||||
The location where Spack will install packages and their dependencies.
|
||||
Default is ``$spack/opt/spack``.
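
For example, a minimal override (the path below is only a placeholder) could look like:

.. code-block:: yaml

   config:
     install_tree:
       root: /custom/path/for/installs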
|
||||
|
||||
@@ -478,14 +478,21 @@ them to the Environment.
spack:
include:
- relative/path/to/config.yaml
- https://github.com/path/to/raw/config/compilers.yaml
- /absolute/path/to/packages.yaml

Environments can include files with either relative or absolute
paths. Inline configurations take precedence over included
configurations, so you don't have to change shared configuration files
to make small changes to an individual Environment. Included configs
listed earlier will have higher precedence, as the included configs are
applied in reverse order.
Environments can include files or URLs. File paths can be relative or
absolute. URLs include the path to the text for individual files or
can be the path to a directory containing configuration files.

^^^^^^^^^^^^^^^^^^^^^^^^
Configuration precedence
^^^^^^^^^^^^^^^^^^^^^^^^

Inline configurations take precedence over included configurations, so
you don't have to change shared configuration files to make small changes
to an individual environment. Included configurations listed earlier will
have higher precedence, as the included configs are applied in reverse order.
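
A small sketch of how these rules combine (the file names below are hypothetical):

.. code-block:: yaml

   spack:
     include:
     - ./site_overrides.yaml          # listed first: wins over the next include
     - /shared/configs/packages.yaml
     packages:                        # inline configuration: highest precedence
       zlib:
         version: [1.2.13]

Here the inline ``packages`` entry overrides both included files, and ``site_overrides.yaml`` takes precedence over ``packages.yaml`` because it is listed earlier.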
|
||||
|
||||
-------------------------------
|
||||
Manually Editing the Specs List
|
||||
@@ -616,31 +623,6 @@ The following two Environment manifests are identical:
|
||||
Spec matrices can be used to install swaths of software across various
|
||||
toolchains.
|
||||
|
||||
The concretization logic for spec matrices differs slightly from the
|
||||
rest of Spack. If a variant or dependency constraint from a matrix is
|
||||
invalid, Spack will reject the constraint and try again without
|
||||
it. For example, the following two Environment manifests will produce
|
||||
the same specs:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
specs:
|
||||
- matrix:
|
||||
- [zlib, libelf, hdf5+mpi]
|
||||
- [^mvapich2@2.2, ^openmpi@3.1.0]
|
||||
|
||||
spack:
|
||||
specs:
|
||||
- zlib
|
||||
- libelf
|
||||
- hdf5+mpi ^mvapich2@2.2
|
||||
- hdf5+mpi ^openmpi@3.1.0
|
||||
|
||||
This allows one to create toolchains out of combinations of
|
||||
constraints and apply them somewhat indiscriminately to packages,
|
||||
without regard for the applicability of the constraint.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
Spec List References
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
@@ -1004,7 +986,7 @@ A typical workflow is as follows:
|
||||
spack env create -d .
|
||||
spack -e . add perl
|
||||
spack -e . concretize
|
||||
spack -e . env depfile > Makefile
|
||||
spack -e . env depfile -o Makefile
|
||||
make -j64
|
||||
|
||||
This generates a ``Makefile`` from a concretized environment in the
|
||||
@@ -1017,7 +999,6 @@ load, even when packages are built in parallel.
|
||||
By default the following phony convenience targets are available:
|
||||
|
||||
- ``make all``: installs the environment (default target);
|
||||
- ``make fetch-all``: only fetch sources of all packages;
|
||||
- ``make clean``: cleans files used by make, but does not uninstall packages.
|
||||
|
||||
.. tip::
|
||||
@@ -1027,8 +1008,17 @@ By default the following phony convenience targets are available:
|
||||
printed orderly per package install. To get synchronized output with colors,
|
||||
use ``make -j<N> SPACK_COLOR=always --output-sync=recurse``.
|
||||
|
||||
The following advanced example shows how generated targets can be used in a
|
||||
``Makefile``:
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Specifying dependencies on generated ``make`` targets
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
An interesting question is how to include generated ``Makefile``\s in your own
|
||||
``Makefile``\s. This comes up when you want to install an environment that provides
|
||||
executables required in a command for a make target of your own.
|
||||
|
||||
The example below shows how to accomplish this: the ``env`` target specifies
|
||||
the generated ``spack/env`` target as a prerequisite, meaning that the environment
|
||||
gets installed and is available for use in the ``env`` target.
|
||||
|
||||
.. code:: Makefile
|
||||
|
||||
@@ -1054,11 +1044,10 @@ The following advanced example shows how generated targets can be used in a
|
||||
include env.mk
|
||||
endif
|
||||
|
||||
When ``make`` is invoked, it first "remakes" the missing include ``env.mk``
|
||||
from its rule, which triggers concretization. When done, the generated target
|
||||
``spack/env`` is available. In the above example, the ``env`` target uses this generated
|
||||
target as a prerequisite, meaning that it can make use of the installed packages in
|
||||
its commands.
|
||||
This works as follows: when ``make`` is invoked, it first "remakes" the missing
|
||||
include ``env.mk`` as there is a target for it. This triggers concretization of
|
||||
the environment and makes spack output ``env.mk``. At that point the
|
||||
generated target ``spack/env`` becomes available through ``include env.mk``.
|
||||
|
||||
As it is typically undesirable to remake ``env.mk`` as part of ``make clean``,
|
||||
the include is conditional.
|
||||
@@ -1069,3 +1058,24 @@ the include is conditional.
|
||||
the ``--make-target-prefix`` flag and use the non-phony target
|
||||
``<target-prefix>/env`` as prerequisite, instead of the phony target
|
||||
``<target-prefix>/all``.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Building a subset of the environment
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The generated ``Makefile``\s contain install targets for each spec. Given the hash
|
||||
of a particular spec, you can use the ``.install/<hash>`` target to install the
|
||||
spec with its dependencies. There is also ``.install-deps/<hash>`` to *only* install
|
||||
its dependencies. This can be useful when certain flags should only apply to
|
||||
dependencies. Below we show a use case where a spec is installed with verbose
|
||||
output (``spack install --verbose``) while its dependencies are installed silently:
|
||||
|
||||
.. code:: console
|
||||
|
||||
$ spack env depfile -o Makefile --make-target-prefix my_env
|
||||
|
||||
# Install dependencies in parallel, only show a log on error.
|
||||
$ make -j16 my_env/.install-deps/<hash> SPACK_INSTALL_FLAGS=--show-log-on-error
|
||||
|
||||
# Install the root spec with verbose output.
|
||||
$ make -j16 my_env/.install/<hash> SPACK_INSTALL_FLAGS=--verbose
|
||||
@@ -23,8 +23,36 @@ be present on the machine where Spack is run:
|
||||
These requirements can be easily installed on most modern Linux systems;
|
||||
on macOS, XCode is required. Spack is designed to run on HPC
|
||||
platforms like Cray. Not all packages should be expected
|
||||
to work on all platforms. A build matrix showing which packages are
|
||||
working on which systems is planned but not yet available.
|
||||
to work on all platforms.
|
||||
|
||||
A build matrix showing which packages are working on which systems is shown below.
|
||||
|
||||
.. tab-set::
|
||||
|
||||
.. tab-item:: Debian/Ubuntu
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
apt update
|
||||
apt install build-essential ca-certificates coreutils curl environment-modules gfortran git gpg lsb-release python3 python3-distutils python3-venv unzip zip
|
||||
|
||||
.. tab-item:: RHEL
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
yum update -y
|
||||
yum install -y epel-release
|
||||
yum update -y
|
||||
yum --enablerepo epel groupinstall -y "Development Tools"
|
||||
yum --enablerepo epel install -y curl findutils gcc-c++ gcc gcc-gfortran git gnupg2 hostname iproute make patch python3 python3-pip python3-setuptools unzip
|
||||
python3 -m pip install boto3
|
||||
|
||||
.. tab-item:: macOS Brew
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
brew update
|
||||
brew install curl gcc git gnupg zip
|
||||
|
||||
------------
|
||||
Installation
|
||||
|
||||
@@ -14,7 +14,13 @@ problems if you encounter them.
|
||||
Spack does not seem to respect ``packages.yaml``
|
||||
------------------------------------------------
|
||||
|
||||
A common problem in Spack v0.18 and above is that package, compiler and target
|
||||
.. note::
|
||||
|
||||
This issue is **resolved** as of v0.19.0.dev0 commit
|
||||
`8281a0c5feabfc4fe180846d6fe95cfe53420bc5`, through the introduction of package
|
||||
requirements. See :ref:`package-requirements`.
|
||||
|
||||
A common problem in Spack v0.18.0 up to v0.19.0.dev0 is that package, compiler and target
|
||||
preferences specified in ``packages.yaml`` do not seem to be respected. Spack picks the
|
||||
"wrong" compilers and their versions, package versions and variants, and
|
||||
micro-architectures.
|
||||
|
||||
@@ -77,7 +77,7 @@ installation of a package.
|
||||
|
||||
Spack only generates modulefiles when a package is installed. If
|
||||
you attempt to install a package and it is already installed, Spack
|
||||
will not regenerate modulefiles for the package. This may to
|
||||
will not regenerate modulefiles for the package. This may lead to
|
||||
inconsistent modulefiles if the Spack module configuration has
|
||||
changed since the package was installed, either by editing a file
|
||||
or changing scopes or environments.
|
||||
|
||||
@@ -4561,6 +4561,9 @@ other checks.
|
||||
* - :ref:`AutotoolsPackage <autotoolspackage>`
|
||||
- ``check`` (``make test``, ``make check``)
|
||||
- ``installcheck`` (``make installcheck``)
|
||||
* - :ref:`CachedCMakePackage <cachedcmakepackage>`
|
||||
- ``check`` (``make check``, ``make test``)
|
||||
- Not applicable
|
||||
* - :ref:`CMakePackage <cmakepackage>`
|
||||
- ``check`` (``make check``, ``make test``)
|
||||
- Not applicable
|
||||
@@ -4585,6 +4588,9 @@ other checks.
|
||||
* - :ref:`SIPPackage <sippackage>`
|
||||
- Not applicable
|
||||
- ``test`` (module imports)
|
||||
* - :ref:`WafPackage <wafpackage>`
|
||||
- ``build_test`` (must be overridden)
|
||||
- ``install_test`` (must be overridden)
|
||||
|
||||
For example, the ``Libelf`` package inherits from ``AutotoolsPackage``
|
||||
and its ``Makefile`` has a standard ``check`` target. So Spack will
|
||||
|
||||
@@ -5,9 +5,9 @@
|
||||
|
||||
.. _pipelines:
|
||||
|
||||
=========
|
||||
Pipelines
|
||||
=========
|
||||
============
|
||||
CI Pipelines
|
||||
============
|
||||
|
||||
Spack provides commands that support generating and running automated build
|
||||
pipelines designed for Gitlab CI. At the highest level it works like this:
|
||||
@@ -168,7 +168,7 @@ which specs are up to date and which need to be rebuilt (it's a good idea for ot
|
||||
reasons as well, but those are out of scope for this discussion). In this case we
|
||||
have disabled it (using ``rebuild-index: False``) because the index would only be
|
||||
generated in the artifacts mirror anyway, and consequently would not be available
|
||||
during subesequent pipeline runs.
|
||||
during subsequent pipeline runs.
|
||||
|
||||
.. note::
|
||||
With the addition of reproducible builds (#22887) a previously working
|
||||
@@ -267,24 +267,64 @@ generated by jobs in the pipeline.
|
||||
``spack ci rebuild``
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The purpose of the ``spack ci rebuild`` is straightforward: take its assigned
|
||||
spec job, check whether the target mirror already has a binary for that spec,
|
||||
and if not, build the spec from source and push the binary to the mirror. To
|
||||
accomplish this in a reproducible way, the sub-command prepares a ``spack install``
|
||||
command line to build a single spec in the DAG, saves that command in a
|
||||
shell script, ``install.sh``, in the current working directory, and then runs
|
||||
it to install the spec. The shell script is also exported as an artifact to
|
||||
aid in reproducing the build outside of the CI environment.
|
||||
The purpose of ``spack ci rebuild`` is straightforward: take its assigned
|
||||
spec and ensure a binary of a successful build exists on the target mirror.
|
||||
If the binary does not already exist, it is built from source and pushed
|
||||
to the mirror. The associated stand-alone tests are optionally run against
|
||||
the new build. Additionally, files for reproducing the build outside of the
|
||||
CI environment are created to facilitate debugging.
|
||||
|
||||
If it was necessary to install the spec from source, ``spack ci rebuild`` will
|
||||
also subsequently create a binary package for the spec and try to push it to the
|
||||
mirror.
|
||||
If a binary for the spec does not exist on the target mirror, an install
|
||||
shell script, ``install.sh``, is created and saved in the current working
|
||||
directory. The script is run in a job to install the spec from source. The
|
||||
resulting binary package is pushed to the mirror. If ``cdash`` is configured
|
||||
for the environment, then the build results will be uploaded to the site.
|
||||
|
||||
The ``spack ci rebuild`` sub-command mainly expects its "input" to come either
|
||||
from environment variables or from the ``gitlab-ci`` section of the ``spack.yaml``
|
||||
environment file. There are two main sources of the environment variables, some
|
||||
are written into ``.gitlab-ci.yml`` by ``spack ci generate``, and some are
|
||||
provided by the GitLab CI runtime.
|
||||
Environment variables and values in the ``gitlab-ci`` section of the
|
||||
``spack.yaml`` environment file provide inputs to this process. The
|
||||
two main sources of environment variables are variables written into
|
||||
``.gitlab-ci.yml`` by ``spack ci generate`` and the GitLab CI runtime.
|
||||
Several key CI pipeline variables are described in
|
||||
:ref:`ci_environment_variables`.
|
||||
|
||||
If the ``--tests`` option is provided, stand-alone tests are performed but
|
||||
only if the build was successful *and* the package does not appear in the
|
||||
list of ``broken-tests-packages``. A shell script, ``test.sh``, is created
|
||||
and run to perform the tests. On completion, test logs are exported as job
|
||||
artifacts for review and to facilitate debugging. If `cdash` is configured,
|
||||
test results are also uploaded to the site.
|
||||
|
||||
A snippet from an example ``spack.yaml`` file illustrating use of this
|
||||
option *and* specification of a package with broken tests is given below.
|
||||
The inclusion of a spec for building ``gptune`` is not shown here. Note
|
||||
that ``--tests`` is passed to ``spack ci rebuild`` as part of the
|
||||
``gitlab-ci`` script.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
gitlab-ci:
|
||||
script:
|
||||
- . "./share/spack/setup-env.sh"
|
||||
- spack --version
|
||||
- cd ${SPACK_CONCRETE_ENV_DIR}
|
||||
- spack env activate --without-view .
|
||||
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
|
||||
- mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
|
||||
- if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
|
||||
- if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
|
||||
- spack -d ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
|
||||
|
||||
broken-tests-packages:
|
||||
- gptune
|
||||
|
||||
In this case, even if ``gptune`` is successfully built from source, the
|
||||
pipeline will *not* run its stand-alone tests since the package is listed
|
||||
under ``broken-tests-packages``.
|
||||
|
||||
Spack's cloud pipelines provide actual, up-to-date examples of the CI/CD
|
||||
configuration and environment files used by Spack. You can find them
|
||||
under Spack's `stacks
|
||||
<https://github.com/spack/spack/tree/develop/share/spack/gitlab/cloud_pipelines/stacks>`_ repository directory.
|
||||
|
||||
.. _cmd-spack-ci-rebuild-index:
|
||||
|
||||
@@ -447,7 +487,7 @@ Note about "no-op" jobs
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If no specs in an environment need to be rebuilt during a given pipeline run
|
||||
(meaning all are already up to date on the mirror), a single succesful job
|
||||
(meaning all are already up to date on the mirror), a single successful job
|
||||
(a NO-OP) is still generated to avoid an empty pipeline (which GitLab
|
||||
considers to be an error). An optional ``service-job-attributes`` section
|
||||
can be added to your ``spack.yaml`` where you can provide ``tags`` and
|
||||
@@ -725,7 +765,7 @@ above with ``git checkout ${SPACK_CHECKOUT_VERSION}``.
|
||||
On the other hand, if you're pointing to a spack repository and branch under your
|
||||
control, there may be no benefit in using the captured ``SPACK_CHECKOUT_VERSION``,
|
||||
and you can instead just clone using the variables you define (``SPACK_REPO``
|
||||
and ``SPACK_REF`` in the example aboves).
|
||||
and ``SPACK_REF`` in the example above).
|
||||
|
||||
.. _custom_workflow:
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
|
||||
sphinx>=3.4,!=4.1.2,!=5.1.0
|
||||
sphinxcontrib-programoutput
|
||||
sphinx-design
|
||||
sphinx-rtd-theme
|
||||
python-levenshtein
|
||||
# Restrict to docutils <0.17 to workaround a list rendering issue in sphinx.
|
||||
|
||||
@@ -18,7 +18,10 @@ spack:
|
||||
- "py-sphinx@3.4:4.1.1,4.1.3:"
|
||||
- py-sphinxcontrib-programoutput
|
||||
- py-docutils@:0.16
|
||||
- py-sphinx-design
|
||||
- py-sphinx-rtd-theme
|
||||
- py-pygments@:2.12
|
||||
|
||||
# VCS
|
||||
- git
|
||||
- mercurial
|
||||
|
||||
2
lib/spack/external/__init__.py
vendored
@@ -18,7 +18,7 @@
|
||||
|
||||
* Homepage: https://pypi.python.org/pypi/archspec
|
||||
* Usage: Labeling, comparison and detection of microarchitectures
|
||||
* Version: 0.1.4 (commit b8eea9df2b4204ff27d204452cd46f5199a0b423)
|
||||
* Version: 0.1.4 (commit e2cfdc266174488dee78b8c9058e36d60dc1b548)
|
||||
|
||||
argparse
|
||||
--------
|
||||
|
||||
@@ -106,7 +106,7 @@ def __eq__(self, other):
|
||||
self.name == other.name
|
||||
and self.vendor == other.vendor
|
||||
and self.features == other.features
|
||||
and self.ancestors == other.ancestors
|
||||
and self.parents == other.parents # avoid ancestors here
|
||||
and self.compilers == other.compilers
|
||||
and self.generation == other.generation
|
||||
)
|
||||
|
||||
@@ -1099,8 +1099,7 @@
|
||||
"avx512cd",
|
||||
"avx512vbmi",
|
||||
"avx512ifma",
|
||||
"sha",
|
||||
"umip"
|
||||
"sha"
|
||||
],
|
||||
"compilers": {
|
||||
"gcc": [
|
||||
@@ -1263,7 +1262,6 @@
|
||||
"avx512vbmi",
|
||||
"avx512ifma",
|
||||
"sha_ni",
|
||||
"umip",
|
||||
"clwb",
|
||||
"rdpid",
|
||||
"gfni",
|
||||
@@ -2249,7 +2247,7 @@
|
||||
}
|
||||
},
|
||||
"graviton2": {
|
||||
"from": ["aarch64"],
|
||||
"from": ["graviton"],
|
||||
"vendor": "ARM",
|
||||
"features": [
|
||||
"fp",
|
||||
@@ -2319,6 +2317,107 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"graviton3": {
|
||||
"from": ["graviton2"],
|
||||
"vendor": "ARM",
|
||||
"features": [
|
||||
"fp",
|
||||
"asimd",
|
||||
"evtstrm",
|
||||
"aes",
|
||||
"pmull",
|
||||
"sha1",
|
||||
"sha2",
|
||||
"crc32",
|
||||
"atomics",
|
||||
"fphp",
|
||||
"asimdhp",
|
||||
"cpuid",
|
||||
"asimdrdm",
|
||||
"jscvt",
|
||||
"fcma",
|
||||
"lrcpc",
|
||||
"dcpop",
|
||||
"sha3",
|
||||
"sm3",
|
||||
"sm4",
|
||||
"asimddp",
|
||||
"sha512",
|
||||
"sve",
|
||||
"asimdfhm",
|
||||
"dit",
|
||||
"uscat",
|
||||
"ilrcpc",
|
||||
"flagm",
|
||||
"ssbs",
|
||||
"paca",
|
||||
"pacg",
|
||||
"dcpodp",
|
||||
"svei8mm",
|
||||
"svebf16",
|
||||
"i8mm",
|
||||
"bf16",
|
||||
"dgh",
|
||||
"rng"
|
||||
],
|
||||
"compilers" : {
|
||||
"gcc": [
|
||||
{
|
||||
"versions": "4.8:4.8.9",
|
||||
"flags": "-march=armv8-a"
|
||||
},
|
||||
{
|
||||
"versions": "4.9:5.9",
|
||||
"flags": "-march=armv8-a+crc+crypto"
|
||||
},
|
||||
{
|
||||
"versions": "6:6.9",
|
||||
"flags" : "-march=armv8.1-a"
|
||||
},
|
||||
{
|
||||
"versions": "7:7.9",
|
||||
"flags" : "-march=armv8.2-a+crypto+fp16 -mtune=cortex-a72"
|
||||
},
|
||||
{
|
||||
"versions": "8.0:8.9",
|
||||
"flags" : "-march=armv8.2-a+fp16+dotprod+crypto -mtune=cortex-a72"
|
||||
},
|
||||
{
|
||||
"versions": "9.0:9.9",
|
||||
"flags" : "-march=armv8.4-a+crypto+rcpc+sha3+sm4+sve+rng+nodotprod -mtune=neoverse-v1"
|
||||
},
|
||||
{
|
||||
"versions": "10.0:",
|
||||
"flags" : "-march=armv8.4-a+crypto+rcpc+sha3+sm4+sve+rng+ssbs+i8mm+bf16+nodotprod -mtune=neoverse-v1"
|
||||
}
|
||||
|
||||
],
|
||||
"clang" : [
|
||||
{
|
||||
"versions": "3.9:4.9",
|
||||
"flags" : "-march=armv8.2-a+fp16+crc+crypto"
|
||||
},
|
||||
{
|
||||
"versions": "5:10",
|
||||
"flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto"
|
||||
},
|
||||
{
|
||||
"versions": "11:",
|
||||
"flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
|
||||
}
|
||||
],
|
||||
"arm" : [
|
||||
{
|
||||
"versions": "20:21.9",
|
||||
"flags" : "-march=armv8.2-a+sve+fp16+rcpc+dotprod+crypto"
|
||||
},
|
||||
{
|
||||
"versions": "22:",
|
||||
"flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"m1": {
|
||||
"from": ["aarch64"],
|
||||
"vendor": "Apple",
|
||||
|
||||
9
lib/spack/external/ctest_log_parser.py
vendored
@@ -71,6 +71,8 @@
|
||||
import re
|
||||
import math
|
||||
import multiprocessing
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
from contextlib import contextmanager
|
||||
|
||||
@@ -409,7 +411,12 @@ def parse(self, stream, context=6, jobs=None):
|
||||
pool = multiprocessing.Pool(jobs)
|
||||
try:
|
||||
# this is a workaround for a Python bug in Pool with ctrl-C
|
||||
results = pool.map_async(_parse_unpack, args, 1).get(9999999)
|
||||
if sys.version_info >= (3, 2):
|
||||
max_timeout = threading.TIMEOUT_MAX
|
||||
else:
|
||||
max_timeout = 9999999
|
||||
results = pool.map_async(_parse_unpack, args, 1).get(max_timeout)
|
||||
|
||||
errors, warnings, timings = zip(*results)
|
||||
finally:
|
||||
pool.terminate()
|
||||
|
||||
@@ -22,9 +22,9 @@
|
||||
from llnl.util import tty
|
||||
from llnl.util.compat import Sequence
|
||||
from llnl.util.lang import dedupe, memoized
|
||||
from llnl.util.symlink import symlink
|
||||
from llnl.util.symlink import islink, symlink
|
||||
|
||||
from spack.util.executable import Executable
|
||||
from spack.util.executable import CommandNotFoundError, Executable, which
|
||||
from spack.util.path import path_to_os_path, system_path_filter
|
||||
|
||||
is_windows = _platform == "win32"
|
||||
@@ -113,6 +113,69 @@ def path_contains_subdirectory(path, root):
|
||||
return norm_path.startswith(norm_root)
|
||||
|
||||
|
||||
@memoized
|
||||
def file_command(*args):
|
||||
"""Creates entry point to `file` system command with provided arguments"""
|
||||
try:
|
||||
file_cmd = which("file", required=True)
|
||||
except CommandNotFoundError as e:
|
||||
if is_windows:
|
||||
raise CommandNotFoundError("`file` utility is not available on Windows")
|
||||
else:
|
||||
raise e
|
||||
for arg in args:
|
||||
file_cmd.add_default_arg(arg)
|
||||
return file_cmd
|
||||
|
||||
|
||||
@memoized
|
||||
def _get_mime_type():
|
||||
"""Generate method to call `file` system command to aquire mime type
|
||||
for a specified path
|
||||
"""
|
||||
return file_command("-b", "-h", "--mime-type")
|
||||
|
||||
|
||||
@memoized
|
||||
def _get_mime_type_compressed():
|
||||
"""Same as _get_mime_type but attempts to check for
|
||||
compression first
|
||||
"""
|
||||
mime_uncompressed = _get_mime_type()
|
||||
mime_uncompressed.add_default_arg("-Z")
|
||||
return mime_uncompressed
|
||||
|
||||
|
||||
def mime_type(filename):
|
||||
"""Returns the mime type and subtype of a file.
|
||||
|
||||
Args:
|
||||
filename: file to be analyzed
|
||||
|
||||
Returns:
|
||||
Tuple containing the MIME type and subtype
|
||||
"""
|
||||
output = _get_mime_type()(filename, output=str, error=str).strip()
|
||||
tty.debug("==> " + output)
|
||||
type, _, subtype = output.partition("/")
|
||||
return type, subtype
|
||||
|
||||
|
||||
def compressed_mime_type(filename):
|
||||
"""Same as mime_type but checks for type that has been compressed
|
||||
|
||||
Args:
|
||||
filename (str): file to be analyzed
|
||||
|
||||
Returns:
|
||||
Tuple containing the MIME type and subtype
|
||||
"""
|
||||
output = _get_mime_type_compressed()(filename, output=str, error=str).strip()
|
||||
tty.debug("==> " + output)
|
||||
type, _, subtype = output.partition("/")
|
||||
return type, subtype
|
||||
|
||||
|
||||
#: This generates the library filenames that may appear on any OS.
|
||||
library_extensions = ["a", "la", "so", "tbd", "dylib"]
|
||||
|
||||
@@ -170,9 +233,14 @@ def filter_file(regex, repl, *filenames, **kwargs):
|
||||
|
||||
Keyword Arguments:
|
||||
string (bool): Treat regex as a plain string. Default it False
|
||||
backup (bool): Make backup file(s) suffixed with ``~``. Default is True
|
||||
backup (bool): Make backup file(s) suffixed with ``~``. Default is False
|
||||
ignore_absent (bool): Ignore any files that don't exist.
|
||||
Default is False
|
||||
start_at (str): Marker used to start applying the replacements. If a
|
||||
text line matches this marker filtering is started at the next line.
|
||||
All contents before the marker and the marker itself are copied
|
||||
verbatim. Default is to start filtering from the first line of the
|
||||
file.
|
||||
stop_at (str): Marker used to stop scanning the file further. If a text
|
||||
line matches this marker filtering is stopped and the rest of the
|
||||
file is copied verbatim. Default is to filter until the end of the
|
||||
@@ -181,6 +249,7 @@ def filter_file(regex, repl, *filenames, **kwargs):
|
||||
string = kwargs.get("string", False)
|
||||
backup = kwargs.get("backup", False)
|
||||
ignore_absent = kwargs.get("ignore_absent", False)
|
||||
start_at = kwargs.get("start_at", None)
|
||||
stop_at = kwargs.get("stop_at", None)
|
||||
|
||||
# Allow strings to use \1, \2, etc. for replacement, like sed
|
||||
@@ -229,6 +298,7 @@ def groupid_to_group(x):
|
||||
# reached or we found a marker in the line if it was specified
|
||||
with open(tmp_filename, mode="r", **extra_kwargs) as input_file:
|
||||
with open(filename, mode="w", **extra_kwargs) as output_file:
|
||||
do_filtering = start_at is None
|
||||
# Using iter and readline is a workaround needed not to
|
||||
# disable input_file.tell(), which will happen if we call
|
||||
# input_file.next() implicitly via the for loop
|
||||
@@ -238,8 +308,12 @@ def groupid_to_group(x):
|
||||
if stop_at == line.strip():
|
||||
output_file.write(line)
|
||||
break
|
||||
filtered_line = re.sub(regex, repl, line)
|
||||
output_file.write(filtered_line)
|
||||
if do_filtering:
|
||||
filtered_line = re.sub(regex, repl, line)
|
||||
output_file.write(filtered_line)
|
||||
else:
|
||||
do_filtering = start_at == line.strip()
|
||||
output_file.write(line)
|
||||
else:
|
||||
current_position = None
|
||||
|
||||
@@ -637,7 +711,11 @@ def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
|
||||
if symlinks:
|
||||
target = os.readlink(s)
|
||||
if os.path.isabs(target):
|
||||
new_target = re.sub(abs_src, abs_dest, target)
|
||||
|
||||
def escaped_path(path):
|
||||
return path.replace("\\", r"\\")
|
||||
|
||||
new_target = re.sub(escaped_path(abs_src), escaped_path(abs_dest), target)
|
||||
if new_target != target:
|
||||
tty.debug("Redirecting link {0} to {1}".format(target, new_target))
|
||||
target = new_target
|
||||
@@ -1903,7 +1981,11 @@ def names(self):
|
||||
name = x[3:]
|
||||
|
||||
# Valid extensions include: ['.dylib', '.so', '.a']
|
||||
for ext in [".dylib", ".so", ".a"]:
|
||||
# on non Windows platform
|
||||
# Windows valid library extensions are:
|
||||
# ['.dll', '.lib']
|
||||
valid_exts = [".dll", ".lib"] if is_windows else [".dylib", ".so", ".a"]
|
||||
for ext in valid_exts:
|
||||
i = name.rfind(ext)
|
||||
if i != -1:
|
||||
names.append(name[:i])
|
||||
@@ -2046,15 +2128,23 @@ def find_libraries(libraries, root, shared=True, recursive=False):
|
||||
message = message.format(find_libraries.__name__, type(libraries))
|
||||
raise TypeError(message)
|
||||
|
||||
if is_windows:
|
||||
static_ext = "lib"
|
||||
shared_ext = "dll"
|
||||
else:
|
||||
# Used on both Linux and macOS
|
||||
static_ext = "a"
|
||||
shared_ext = "so"
|
||||
|
||||
# Construct the right suffix for the library
|
||||
if shared:
|
||||
# Used on both Linux and macOS
|
||||
suffixes = ["so"]
|
||||
suffixes = [shared_ext]
|
||||
if sys.platform == "darwin":
|
||||
# Only used on macOS
|
||||
suffixes.append("dylib")
|
||||
else:
|
||||
suffixes = ["a"]
|
||||
suffixes = [static_ext]
|
||||
|
||||
# List of libraries we are searching with suffixes
|
||||
libraries = ["{0}.{1}".format(lib, suffix) for lib in libraries for suffix in suffixes]
|
||||
@@ -2067,7 +2157,11 @@ def find_libraries(libraries, root, shared=True, recursive=False):
|
||||
# perform first non-recursive search in root/lib then in root/lib64 and
|
||||
# finally search all of root recursively. The search stops when the first
|
||||
# match is found.
|
||||
for subdir in ("lib", "lib64"):
|
||||
common_lib_dirs = ["lib", "lib64"]
|
||||
if is_windows:
|
||||
common_lib_dirs.extend(["bin", "Lib"])
|
||||
|
||||
for subdir in common_lib_dirs:
|
||||
dirname = join_path(root, subdir)
|
||||
if not os.path.isdir(dirname):
|
||||
continue
|
||||
@@ -2080,6 +2174,155 @@ def find_libraries(libraries, root, shared=True, recursive=False):
|
||||
return LibraryList(found_libs)
|
||||
|
||||
|
||||
def find_all_shared_libraries(root, recursive=False):
|
||||
"""Convenience function that returns the list of all shared libraries found
|
||||
in the directory passed as argument.
|
||||
|
||||
See documentation for `llnl.util.filesystem.find_libraries` for more information
|
||||
"""
|
||||
return find_libraries("*", root=root, shared=True, recursive=recursive)
|
||||
|
||||
|
||||
def find_all_static_libraries(root, recursive=False):
|
||||
"""Convenience function that returns the list of all static libraries found
|
||||
in the directory passed as argument.
|
||||
|
||||
See documentation for `llnl.util.filesystem.find_libraries` for more information
|
||||
"""
|
||||
return find_libraries("*", root=root, shared=False, recursive=recursive)
|
||||
|
||||
|
||||
def find_all_libraries(root, recursive=False):
|
||||
"""Convenience function that returns the list of all libraries found
|
||||
in the directory passed as argument.
|
||||
|
||||
See documentation for `llnl.util.filesystem.find_libraries` for more information
|
||||
"""
|
||||
|
||||
return find_all_shared_libraries(root, recursive=recursive) + find_all_static_libraries(
|
||||
root, recursive=recursive
|
||||
)
|
||||
|
||||
|
||||
class WindowsSimulatedRPath(object):
|
||||
"""Class representing Windows filesystem rpath analog
|
||||
|
||||
One instance of this class is associated with a package (only on Windows)
|
||||
For each lib/binary directory in an associated package, this class introduces
|
||||
a symlink to any/all dependent libraries/binaries. This includes the packages
|
||||
own bin/lib directories, meaning the libraries are linked to the bianry directory
|
||||
and vis versa.
|
||||
"""
|
||||
|
||||
def __init__(self, package, link_install_prefix=True):
|
||||
"""
|
||||
Args:
|
||||
package (spack.package_base.PackageBase): Package requiring links
|
||||
link_install_prefix (bool): Link against package's own install or stage root.
|
||||
Packages that run their own executables during the build and need rpaths
to the build directory at build time require this option. Default: install root
|
||||
"""
|
||||
self.pkg = package
|
||||
self._addl_rpaths = set()
|
||||
self.link_install_prefix = link_install_prefix
|
||||
self._internal_links = set()
|
||||
|
||||
@property
|
||||
def link_dest(self):
|
||||
"""
|
||||
Set of directories where package binaries/libraries are located.
|
||||
"""
|
||||
if hasattr(self.pkg, "libs") and self.pkg.libs:
|
||||
pkg_libs = set(self.pkg.libs.directories)
|
||||
else:
|
||||
pkg_libs = set((self.pkg.prefix.lib, self.pkg.prefix.lib64))
|
||||
|
||||
return pkg_libs | set([self.pkg.prefix.bin]) | self.internal_links
|
||||
|
||||
@property
|
||||
def internal_links(self):
|
||||
"""
|
||||
Linking that would need to be established within the package itself. Useful for links
against extension modules, build-time executables, and internal linkage.
|
||||
"""
|
||||
return self._internal_links
|
||||
|
||||
def add_internal_links(self, *dest):
|
||||
"""
|
||||
Incorporate additional paths into the rpath (sym)linking scheme.
|
||||
|
||||
Paths provided to this method are linked against by a package's libraries
|
||||
and libraries found at these paths are linked against a package's binaries.
|
||||
(i.e. /site-packages -> /bin and /bin -> /site-packages)
|
||||
|
||||
Specified paths should be outside of a package's lib, lib64, and bin
|
||||
directories.
|
||||
"""
|
||||
self._internal_links = self._internal_links | set(*dest)
|
||||
|
||||
@property
|
||||
def link_targets(self):
|
||||
"""
|
||||
Set of libraries this package needs to link against at runtime.
Each of these libraries will be symlinked into the package's lib and binary directories.
|
||||
"""
|
||||
|
||||
dependent_libs = []
|
||||
for path in self.pkg.rpath:
|
||||
dependent_libs.extend(list(find_all_shared_libraries(path, recursive=True)))
|
||||
for extra_path in self._addl_rpaths:
|
||||
dependent_libs.extend(list(find_all_shared_libraries(extra_path, recursive=True)))
|
||||
return set(dependent_libs)
|
||||
|
||||
def include_additional_link_paths(self, *paths):
|
||||
"""
|
||||
Add libraries found at the root of provided paths to runtime linking
|
||||
|
||||
These are libraries found outside of the typical scope of rpath linking
|
||||
that require manual inclusion in a runtime linking scheme
|
||||
|
||||
Args:
|
||||
*paths (str): arbitrary number of paths to be added to runtime linking
|
||||
"""
|
||||
self._addl_rpaths = self._addl_rpaths | set(paths)
|
||||
|
||||
def establish_link(self):
|
||||
"""
|
||||
(Sym)link packages to their runtime dependencies based on the RPath configuration,
using Windows-specific heuristics.
|
||||
"""
|
||||
# from build_environment.py:463
|
||||
# The top-level package is always RPATHed. It hasn't been installed yet
|
||||
# so the RPATHs are added unconditionally
|
||||
|
||||
# for each binary install dir in self.pkg (i.e. pkg.prefix.bin, pkg.prefix.lib)
|
||||
# install a symlink to each dependent library
|
||||
for library, lib_dir in itertools.product(self.link_targets, self.link_dest):
|
||||
if not path_contains_subdirectory(library, lib_dir):
|
||||
file_name = os.path.basename(library)
|
||||
dest_file = os.path.join(lib_dir, file_name)
|
||||
if os.path.exists(lib_dir):
|
||||
try:
|
||||
symlink(library, dest_file)
|
||||
# For py2 compatibility, we have to catch the specific Windows error code
|
||||
# associated with trying to create a file that already exists (winerror 183)
|
||||
except OSError as e:
|
||||
if e.winerror == 183:
|
||||
# We have either already symlinked or we are encountering a naming clash
|
||||
# either way, we don't want to overwrite existing libraries
|
||||
already_linked = islink(dest_file)
|
||||
tty.debug(
    "Linking library %s to %s failed, " % (library, dest_file)
    + (
        "already linked."
        if already_linked
        else "library with name %s already exists." % file_name
    )
)
|
||||
pass
|
||||
else:
|
||||
raise e
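A self-contained sketch of the per-file behaviour in the loop above, using plain os.symlink instead of Spack's symlink wrapper; the winerror/EEXIST handling shown here is the assumption being illustrated:

import errno
import os

def link_if_missing(library, lib_dir):
    # Create a symlink for `library` inside `lib_dir`, but never overwrite
    # an existing file; mirrors the winerror-183 handling above.
    dest = os.path.join(lib_dir, os.path.basename(library))
    try:
        os.symlink(library, dest)
    except OSError as e:
        # Windows reports "file already exists" as winerror 183;
        # POSIX systems raise EEXIST instead.
        if getattr(e, "winerror", None) == 183 or e.errno == errno.EEXIST:
            return False  # already linked or a naming clash; leave it alone
        raise
    return True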
|
||||
|
||||
|
||||
@system_path_filter
|
||||
@memoized
|
||||
def can_access_dir(path):
|
||||
|
||||
@@ -386,8 +386,12 @@ def _ensure_parent_directory(self):
|
||||
try:
|
||||
os.makedirs(parent)
|
||||
except OSError as e:
|
||||
# makedirs can fail when the directory already exists.
|
||||
if not (e.errno == errno.EEXIST and os.path.isdir(parent) or e.errno == errno.EISDIR):
|
||||
# os.makedirs can fail in a number of ways when the directory already exists.
|
||||
# With EISDIR, we know it exists, and others like EEXIST, EACCES, and EROFS
|
||||
# are fine if we ensure that the directory exists.
|
||||
# Python 3 allows an exist_ok parameter and ignores any OSError as long as
|
||||
# the directory exists.
|
||||
if not (e.errno == errno.EISDIR or os.path.isdir(parent)):
|
||||
raise
|
||||
return parent
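For reference, the Python 3 shortcut mentioned in the new comment is a one-liner (illustrative only, not part of the patch):

import os

# exist_ok swallows the error when the directory is already there, which is the
# behaviour the guarded os.makedirs above emulates.
os.makedirs(os.path.join("build", "artifacts"), exist_ok=True)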
|
||||
|
||||
|
||||
@@ -150,13 +150,17 @@ def color_when(value):
|
||||
|
||||
|
||||
class match_to_ansi(object):
|
||||
def __init__(self, color=True):
|
||||
def __init__(self, color=True, enclose=False):
|
||||
self.color = _color_when_value(color)
|
||||
self.enclose = enclose
|
||||
|
||||
def escape(self, s):
|
||||
"""Returns a TTY escape sequence for a color"""
|
||||
if self.color:
|
||||
return "\033[%sm" % s
|
||||
if self.enclose:
|
||||
return r"\[\033[%sm\]" % s
|
||||
else:
|
||||
return "\033[%sm" % s
|
||||
else:
|
||||
return ""
|
||||
|
||||
@@ -201,9 +205,11 @@ def colorize(string, **kwargs):
|
||||
Keyword Arguments:
|
||||
color (bool): If False, output will be plain text without control
|
||||
codes, for output to non-console devices.
|
||||
enclose (bool): If True, enclose ansi color sequences with
|
||||
square brackets to prevent misestimation of terminal width.
|
||||
"""
|
||||
color = _color_when_value(kwargs.get("color", get_color_when()))
|
||||
string = re.sub(color_re, match_to_ansi(color), string)
|
||||
string = re.sub(color_re, match_to_ansi(color, kwargs.get("enclose")), string)
|
||||
string = string.replace("}}", "}")
|
||||
return string
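To illustrate the two escape forms the new enclose flag switches between (plain Python, not Spack's API):

# The raw ANSI escape is counted by readline when it estimates prompt width;
# the bracketed form marks the sequence as non-printing so it is ignored.
seq = "0;31"
plain = "\033[%sm" % seq
enclosed = r"\[\033[%sm\]" % seq
print(repr(plain), repr(enclosed))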
|
||||
|
||||
|
||||
@@ -228,8 +228,8 @@ def __init__(self, controller_function, minion_function):
|
||||
self.minion_function = minion_function
|
||||
|
||||
# these can be optionally set to change defaults
|
||||
self.controller_timeout = 1
|
||||
self.sleep_time = 0
|
||||
self.controller_timeout = 3
|
||||
self.sleep_time = 0.1
|
||||
|
||||
def start(self, **kwargs):
|
||||
"""Start the controller and minion processes.
|
||||
|
||||
@@ -35,9 +35,11 @@ def _search_duplicate_compilers(error_cls):
|
||||
the decorator object, that will forward the keyword arguments passed
|
||||
as input.
|
||||
"""
|
||||
import ast
|
||||
import collections
|
||||
import inspect
|
||||
import itertools
|
||||
import pickle
|
||||
import re
|
||||
|
||||
from six.moves.urllib.request import urlopen
|
||||
@@ -49,6 +51,7 @@ def _search_duplicate_compilers(error_cls):
|
||||
import spack.patch
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.util.crypto
|
||||
import spack.variant
|
||||
|
||||
#: Map an audit tag to a list of callables implementing checks
|
||||
@@ -261,6 +264,14 @@ def _search_duplicate_specs_in_externals(error_cls):
|
||||
)
|
||||
|
||||
|
||||
package_properties = AuditClass(
|
||||
group="packages",
|
||||
tag="PKG-PROPERTIES",
|
||||
description="Sanity checks on properties a package should maintain",
|
||||
kwargs=("pkgs",),
|
||||
)
|
||||
|
||||
|
||||
#: Sanity checks on linting
|
||||
# This can take some time, so it's run separately from packages
|
||||
package_https_directives = AuditClass(
|
||||
@@ -353,6 +364,145 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
|
||||
return errors
|
||||
|
||||
|
||||
@package_properties
|
||||
def _ensure_all_package_names_are_lowercase(pkgs, error_cls):
|
||||
"""Ensure package names are lowercase and consistent"""
|
||||
badname_regex, errors = re.compile(r"[_A-Z]"), []
|
||||
for pkg_name in pkgs:
|
||||
if badname_regex.search(pkg_name):
|
||||
error_msg = "Package name '{}' is either lowercase or conatine '_'".format(pkg_name)
|
||||
errors.append(error_cls(error_msg, []))
|
||||
return errors
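A quick illustration of what the badname regex above accepts and rejects (standard library only, not part of the patch):

import re

badname_regex = re.compile(r"[_A-Z]")
# "py-numpy" passes; "Py_Numpy" is flagged for both the uppercase letter and the underscore.
assert badname_regex.search("py-numpy") is None
assert badname_regex.search("Py_Numpy") is not None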
|
||||
|
||||
|
||||
@package_properties
|
||||
def _ensure_packages_are_pickeleable(pkgs, error_cls):
|
||||
"""Ensure that package objects are pickleable"""
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
|
||||
pkg = pkg_cls(spack.spec.Spec(pkg_name))
|
||||
try:
|
||||
pickle.dumps(pkg)
|
||||
except Exception as e:
|
||||
error_msg = "Package '{}' failed to pickle".format(pkg_name)
|
||||
details = ["{}".format(str(e))]
|
||||
errors.append(error_cls(error_msg, details))
|
||||
return errors
|
||||
|
||||
|
||||
@package_properties
|
||||
def _ensure_packages_are_unparseable(pkgs, error_cls):
|
||||
"""Ensure that all packages can unparse and that unparsed code is valid Python"""
|
||||
import spack.util.package_hash as ph
|
||||
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
try:
|
||||
source = ph.canonical_source(pkg_name, filter_multimethods=False)
|
||||
except Exception as e:
|
||||
error_msg = "Package '{}' failed to unparse".format(pkg_name)
|
||||
details = ["{}".format(str(e))]
|
||||
errors.append(error_cls(error_msg, details))
|
||||
continue
|
||||
|
||||
try:
|
||||
compile(source, "internal", "exec", ast.PyCF_ONLY_AST)
|
||||
except Exception as e:
|
||||
error_msg = "The unparsed package '{}' failed to compile".format(pkg_name)
|
||||
details = ["{}".format(str(e))]
|
||||
errors.append(error_cls(error_msg, details))
|
||||
|
||||
return errors
|
||||
|
||||
|
||||
@package_properties
|
||||
def _ensure_all_versions_can_produce_a_fetcher(pkgs, error_cls):
|
||||
"""Ensure all versions in a package can produce a fetcher"""
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
|
||||
pkg = pkg_cls(spack.spec.Spec(pkg_name))
|
||||
try:
|
||||
spack.fetch_strategy.check_pkg_attributes(pkg)
|
||||
for version in pkg.versions:
|
||||
assert spack.fetch_strategy.for_package_version(pkg, version)
|
||||
except Exception as e:
|
||||
error_msg = "The package '{}' cannot produce a fetcher for some of its versions"
|
||||
details = ["{}".format(str(e))]
|
||||
errors.append(error_cls(error_msg.format(pkg_name), details))
|
||||
return errors
|
||||
|
||||
|
||||
@package_properties
|
||||
def _ensure_docstring_and_no_fixme(pkgs, error_cls):
|
||||
"""Ensure the package has a docstring and no fixmes"""
|
||||
errors = []
|
||||
fixme_regexes = [
|
||||
re.compile(r"remove this boilerplate"),
|
||||
re.compile(r"FIXME: Put"),
|
||||
re.compile(r"FIXME: Add"),
|
||||
re.compile(r"example.com"),
|
||||
]
|
||||
for pkg_name in pkgs:
|
||||
details = []
|
||||
filename = spack.repo.path.filename_for_package_name(pkg_name)
|
||||
with open(filename, "r") as package_file:
|
||||
for i, line in enumerate(package_file):
|
||||
pattern = next((r for r in fixme_regexes if r.search(line)), None)
|
||||
if pattern:
|
||||
details.append(
|
||||
"%s:%d: boilerplate needs to be removed: %s" % (filename, i, line.strip())
|
||||
)
|
||||
if details:
|
||||
error_msg = "Package '{}' contains boilerplate that need to be removed"
|
||||
errors.append(error_cls(error_msg.format(pkg_name), details))
|
||||
|
||||
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
|
||||
if not pkg_cls.__doc__:
|
||||
error_msg = "Package '{}' miss a docstring"
|
||||
errors.append(error_cls(error_msg.format(pkg_name), []))
|
||||
|
||||
return errors
|
||||
|
||||
|
||||
@package_properties
|
||||
def _ensure_all_packages_use_sha256_checksums(pkgs, error_cls):
|
||||
"""Ensure no packages use md5 checksums"""
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
|
||||
if pkg_cls.manual_download:
|
||||
continue
|
||||
|
||||
pkg = pkg_cls(spack.spec.Spec(pkg_name))
|
||||
|
||||
def invalid_sha256_digest(fetcher):
|
||||
if getattr(fetcher, "digest", None):
|
||||
h = spack.util.crypto.hash_algo_for_digest(fetcher.digest)
|
||||
if h != "sha256":
|
||||
return h, True
|
||||
return None, False
|
||||
|
||||
error_msg = "Package '{}' does not use sha256 checksum".format(pkg_name)
|
||||
details = []
|
||||
for v, args in pkg.versions.items():
|
||||
fetcher = spack.fetch_strategy.for_package_version(pkg, v)
|
||||
digest, is_bad = invalid_sha256_digest(fetcher)
|
||||
if is_bad:
|
||||
details.append("{}@{} uses {}".format(pkg_name, v, digest))
|
||||
|
||||
for _, resources in pkg.resources.items():
|
||||
for resource in resources:
|
||||
digest, is_bad = invalid_sha256_digest(resource.fetcher)
|
||||
if is_bad:
|
||||
details.append("Resource in '{}' uses {}".format(pkg_name, digest))
|
||||
if details:
|
||||
errors.append(error_cls(error_msg, details))
|
||||
|
||||
return errors
|
||||
|
||||
|
||||
@package_https_directives
|
||||
def _linting_package_file(pkgs, error_cls):
|
||||
"""Check for correctness of links"""
|
||||
@@ -490,6 +640,36 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
|
||||
return errors
|
||||
|
||||
|
||||
@package_directives
|
||||
def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
|
||||
"""Ensures that variant defaults are present and parsable from cli"""
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
|
||||
for variant_name, entry in pkg_cls.variants.items():
|
||||
variant, _ = entry
|
||||
default_is_parsable = (
|
||||
# Permitting a default that is an instance of 'int' allows
# foo=false or foo=0 to be used. Other falsish values are
# not allowed, since they can't be parsed from the CLI ('foo=')
|
||||
isinstance(variant.default, int)
|
||||
or variant.default
|
||||
)
|
||||
if not default_is_parsable:
|
||||
error_msg = "Variant '{}' of package '{}' has a bad default value"
|
||||
errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
|
||||
continue
|
||||
|
||||
vspec = variant.make_default()
|
||||
try:
|
||||
variant.validate_or_raise(vspec, pkg_cls=pkg_cls)
|
||||
except spack.variant.InvalidVariantValueError:
|
||||
error_msg = "The variant '{}' default value in package '{}' cannot be validated"
|
||||
errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
|
||||
|
||||
return errors
|
||||
|
||||
|
||||
@package_directives
|
||||
def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls):
|
||||
"""Report if version constraints used in directives are not satisfiable"""
|
||||
|
||||
@@ -19,6 +19,7 @@
|
||||
import ruamel.yaml as yaml
|
||||
from six.moves.urllib.error import HTTPError, URLError
|
||||
|
||||
import llnl.util.filesystem as fsys
|
||||
import llnl.util.lang
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import mkdirp
|
||||
@@ -26,7 +27,6 @@
|
||||
import spack.cmd
|
||||
import spack.config as config
|
||||
import spack.database as spack_db
|
||||
import spack.fetch_strategy as fs
|
||||
import spack.hooks
|
||||
import spack.hooks.sbang
|
||||
import spack.mirror
|
||||
@@ -654,7 +654,7 @@ def get_buildfile_manifest(spec):
|
||||
|
||||
for filename in files:
|
||||
path_name = os.path.join(root, filename)
|
||||
m_type, m_subtype = relocate.mime_type(path_name)
|
||||
m_type, m_subtype = fsys.mime_type(path_name)
|
||||
rel_path_name = os.path.relpath(path_name, spec.prefix)
|
||||
added = False
|
||||
|
||||
@@ -1231,7 +1231,7 @@ def try_fetch(url_to_fetch):
|
||||
|
||||
try:
|
||||
stage.fetch()
|
||||
except fs.FetchError:
|
||||
except web_util.FetchError:
|
||||
stage.destroy()
|
||||
return None
|
||||
|
||||
@@ -1954,7 +1954,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
|
||||
if not os.path.exists(stage.save_filename):
|
||||
try:
|
||||
stage.fetch()
|
||||
except fs.FetchError:
|
||||
except web_util.FetchError:
|
||||
continue
|
||||
|
||||
tty.debug("Found key {0}".format(fingerprint))
|
||||
@@ -2106,7 +2106,7 @@ def _download_buildcache_entry(mirror_root, descriptions):
|
||||
try:
|
||||
stage.fetch()
|
||||
break
|
||||
except fs.FetchError as e:
|
||||
except web_util.FetchError as e:
|
||||
tty.debug(e)
|
||||
else:
|
||||
if fail_if_missing:
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
import re
|
||||
import sys
|
||||
import sysconfig
|
||||
import uuid
|
||||
|
||||
import six
|
||||
|
||||
@@ -40,10 +41,13 @@
|
||||
import spack.util.path
|
||||
import spack.util.spack_yaml
|
||||
import spack.util.url
|
||||
import spack.version
|
||||
|
||||
#: Name of the file containing metadata about the bootstrapping source
|
||||
METADATA_YAML_FILENAME = "metadata.yaml"
|
||||
|
||||
is_windows = sys.platform == "win32"
|
||||
|
||||
#: Map a bootstrapper type to the corresponding class
|
||||
_bootstrap_methods = {}
|
||||
|
||||
@@ -260,12 +264,11 @@ def mirror_scope(self):
|
||||
class _BuildcacheBootstrapper(_BootstrapperBase):
|
||||
"""Install the software needed during bootstrapping from a buildcache."""
|
||||
|
||||
config_scope_name = "bootstrap_buildcache"
|
||||
|
||||
def __init__(self, conf):
|
||||
super(_BuildcacheBootstrapper, self).__init__(conf)
|
||||
self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])
|
||||
self.last_search = None
|
||||
self.config_scope_name = "bootstrap_buildcache-{}".format(uuid.uuid4())
|
||||
|
||||
@staticmethod
|
||||
def _spec_and_platform(abstract_spec_str):
|
||||
@@ -378,13 +381,12 @@ def try_search_path(self, executables, abstract_spec_str):
|
||||
class _SourceBootstrapper(_BootstrapperBase):
|
||||
"""Install the software needed during bootstrapping from sources."""
|
||||
|
||||
config_scope_name = "bootstrap_source"
|
||||
|
||||
def __init__(self, conf):
|
||||
super(_SourceBootstrapper, self).__init__(conf)
|
||||
self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])
|
||||
self.conf = conf
|
||||
self.last_search = None
|
||||
self.config_scope_name = "bootstrap_source-{}".format(uuid.uuid4())
|
||||
|
||||
def try_import(self, module, abstract_spec_str):
|
||||
info = {}
|
||||
@@ -655,6 +657,8 @@ def _add_externals_if_missing():
|
||||
# GnuPG
|
||||
spack.repo.path.get_pkg_class("gawk"),
|
||||
]
|
||||
if is_windows:
|
||||
search_list.append(spack.repo.path.get_pkg_class("winbison"))
|
||||
detected_packages = spack.detection.by_executable(search_list)
|
||||
spack.detection.update_configuration(detected_packages, scope="bootstrap")
|
||||
|
||||
@@ -788,17 +792,46 @@ def ensure_gpg_in_path_or_raise():
|
||||
|
||||
def patchelf_root_spec():
|
||||
"""Return the root spec used to bootstrap patchelf"""
|
||||
# TODO: patchelf is restricted to v0.13 since earlier versions have
|
||||
# TODO: bugs that we don't to deal with, while v0.14 requires a C++17
|
||||
# TODO: which may not be available on all platforms.
|
||||
return _root_spec("patchelf@0.13.1:0.13.99")
|
||||
# 0.13.1 is the last version not to require C++17.
|
||||
return _root_spec("patchelf@0.13.1:")
|
||||
|
||||
|
||||
def verify_patchelf(patchelf):
|
||||
"""Older patchelf versions can produce broken binaries, so we
|
||||
verify the version here.
|
||||
|
||||
Arguments:
|
||||
|
||||
patchelf (spack.util.executable.Executable): patchelf executable
|
||||
"""
|
||||
out = patchelf("--version", output=str, error=os.devnull, fail_on_error=False).strip()
|
||||
if patchelf.returncode != 0:
|
||||
return False
|
||||
parts = out.split(" ")
|
||||
if len(parts) < 2:
|
||||
return False
|
||||
try:
|
||||
version = spack.version.Version(parts[1])
|
||||
except ValueError:
|
||||
return False
|
||||
return version >= spack.version.Version("0.13.1")
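A rough stand-in for the version parsing performed by verify_patchelf; the real code builds a spack.version.Version, here a plain integer tuple is assumed instead:

def parse_patchelf_version(output):
    # Expects output such as "patchelf 0.14.3"; returns a comparable tuple or None.
    parts = output.strip().split(" ")
    if len(parts) < 2:
        return None
    try:
        return tuple(int(p) for p in parts[1].split("."))
    except ValueError:
        return None

assert parse_patchelf_version("patchelf 0.14.3") >= (0, 13, 1)
assert parse_patchelf_version("version unknown") is None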
|
||||
|
||||
|
||||
def ensure_patchelf_in_path_or_raise():
|
||||
"""Ensure patchelf is in the PATH or raise."""
|
||||
return ensure_executables_in_path_or_raise(
|
||||
executables=["patchelf"], abstract_spec=patchelf_root_spec()
|
||||
)
|
||||
# The old concretizer is not smart and we're doing its job: if the latest patchelf
|
||||
# does not concretize because the compiler doesn't support C++17, we try to
|
||||
# concretize again with an upper bound of @:0.13.
|
||||
try:
|
||||
return ensure_executables_in_path_or_raise(
|
||||
executables=["patchelf"], abstract_spec=patchelf_root_spec(), cmd_check=verify_patchelf
|
||||
)
|
||||
except RuntimeError:
|
||||
return ensure_executables_in_path_or_raise(
|
||||
executables=["patchelf"],
|
||||
abstract_spec=_root_spec("patchelf@0.13.1:0.13"),
|
||||
cmd_check=verify_patchelf,
|
||||
)
|
||||
|
||||
|
||||
###
|
||||
|
||||
@@ -64,7 +64,9 @@
|
||||
import spack.subprocess_context
|
||||
import spack.user_environment
|
||||
import spack.util.path
|
||||
import spack.util.pattern
|
||||
from spack.error import NoHeadersError, NoLibrariesError
|
||||
from spack.installer import InstallError
|
||||
from spack.util.cpus import cpus_available
|
||||
from spack.util.environment import (
|
||||
EnvironmentModifications,
|
||||
@@ -108,7 +110,14 @@
|
||||
|
||||
|
||||
# Platform-specific library suffix.
|
||||
dso_suffix = "dylib" if sys.platform == "darwin" else "so"
|
||||
if sys.platform == "darwin":
|
||||
dso_suffix = "dylib"
|
||||
elif sys.platform == "win32":
|
||||
dso_suffix = "dll"
|
||||
else:
|
||||
dso_suffix = "so"
|
||||
|
||||
stat_suffix = "lib" if sys.platform == "win32" else "a"
|
||||
|
||||
|
||||
def should_set_parallel_jobs(jobserver_support=False):
|
||||
@@ -192,6 +201,8 @@ def clean_environment():
|
||||
|
||||
env.unset("CMAKE_PREFIX_PATH")
|
||||
env.unset("PYTHONPATH")
|
||||
env.unset("R_HOME")
|
||||
env.unset("R_ENVIRON")
|
||||
|
||||
# Affects GNU make, can e.g. indirectly inhibit enabling parallel build
|
||||
# env.unset('MAKEFLAGS')
|
||||
@@ -985,8 +996,24 @@ def add_modifications_for_dep(dep):
|
||||
dpkg = dep.package
|
||||
if set_package_py_globals:
|
||||
set_module_variables_for_package(dpkg)
|
||||
|
||||
# Allow dependencies to modify the module
|
||||
dpkg.setup_dependent_package(spec.package.module, spec)
|
||||
# Get list of modules that may need updating
|
||||
modules = []
|
||||
for cls in inspect.getmro(type(spec.package)):
|
||||
module = cls.module
|
||||
if module == spack.package_base:
|
||||
break
|
||||
modules.append(module)
|
||||
|
||||
# Execute changes as if on a single module
|
||||
# copy dict to ensure prior changes are available
|
||||
changes = spack.util.pattern.Bunch()
|
||||
dpkg.setup_dependent_package(changes, spec)
|
||||
|
||||
for module in modules:
|
||||
module.__dict__.update(changes.__dict__)
|
||||
|
||||
if context == "build":
|
||||
dpkg.setup_dependent_build_environment(env, spec)
|
||||
else:
|
||||
@@ -1029,8 +1056,11 @@ def get_cmake_prefix_path(pkg):
|
||||
spack_built.insert(0, dspec)
|
||||
|
||||
ordered_build_link_deps = spack_built + externals
|
||||
build_link_prefixes = filter_system_paths(x.prefix for x in ordered_build_link_deps)
|
||||
return build_link_prefixes
|
||||
cmake_prefix_path_entries = []
|
||||
for spec in ordered_build_link_deps:
|
||||
cmake_prefix_path_entries.extend(spec.package.cmake_prefix_paths)
|
||||
|
||||
return filter_system_paths(cmake_prefix_path_entries)
|
||||
|
||||
|
||||
def _setup_pkg_and_run(
|
||||
@@ -1279,15 +1309,6 @@ def make_stack(tb, stack=None):
|
||||
return lines
|
||||
|
||||
|
||||
class InstallError(spack.error.SpackError):
|
||||
"""Raised by packages when a package fails to install.
|
||||
|
||||
Any subclass of InstallError will be annotated by Spack with a
|
||||
``pkg`` attribute on failure, which the caller can use to get the
|
||||
package for which the exception was raised.
|
||||
"""
|
||||
|
||||
|
||||
class ChildError(InstallError):
|
||||
"""Special exception class for wrapping exceptions from child processes
|
||||
in Spack's build environment.
|
||||
|
||||
@@ -244,8 +244,11 @@ def _patch_usr_bin_file(self):
|
||||
scripts to use file from path."""
|
||||
|
||||
if self.spec.os.startswith("nixos"):
|
||||
for configure_file in fs.find(".", files=["configure"], recursive=True):
|
||||
fs.filter_file("/usr/bin/file", "file", configure_file, string=True)
|
||||
x = fs.FileFilter(
|
||||
*filter(fs.is_exe, fs.find(self.build_directory, "configure", recursive=True))
|
||||
)
|
||||
with fs.keep_modification_time(*x.filenames):
|
||||
x.filter(regex="/usr/bin/file", repl="file", string=True)
|
||||
|
||||
@run_before("configure")
|
||||
def _set_autotools_environment_variables(self):
|
||||
@@ -262,34 +265,97 @@ def _set_autotools_environment_variables(self):
|
||||
"""
|
||||
os.environ["FORCE_UNSAFE_CONFIGURE"] = "1"
|
||||
|
||||
@run_before("configure")
|
||||
def _do_patch_libtool_configure(self):
|
||||
"""Patch bugs that propagate from libtool macros into "configure" and
|
||||
further into "libtool". Note that patches that can be fixed by patching
|
||||
"libtool" directly should be implemented in the _do_patch_libtool method
|
||||
below."""
|
||||
|
||||
# Exit early if we are required not to patch libtool-related problems:
|
||||
if not self.patch_libtool:
|
||||
return
|
||||
|
||||
x = fs.FileFilter(
|
||||
*filter(fs.is_exe, fs.find(self.build_directory, "configure", recursive=True))
|
||||
)
|
||||
|
||||
# There are distributed automatically generated files that depend on the configure script
|
||||
# and require additional tools for rebuilding.
|
||||
# See https://github.com/spack/spack/pull/30768#issuecomment-1219329860
|
||||
with fs.keep_modification_time(*x.filenames):
|
||||
# Fix parsing of compiler output when collecting predeps and postdeps
|
||||
# https://lists.gnu.org/archive/html/bug-libtool/2016-03/msg00003.html
|
||||
x.filter(regex=r'^(\s*if test x-L = )("\$p" \|\|\s*)$', repl=r"\1x\2")
|
||||
x.filter(
|
||||
regex=r'^(\s*test x-R = )("\$p")(; then\s*)$', repl=r'\1x\2 || test x-l = x"$p"\3'
|
||||
)
|
||||
# Support Libtool 2.4.2 and older:
|
||||
x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')
|
||||
|
||||
@run_after("configure")
|
||||
def _do_patch_libtool(self):
|
||||
"""If configure generates a "libtool" script that does not correctly
|
||||
detect the compiler (and patch_libtool is set), patch in the correct
|
||||
flags for the Arm, Clang/Flang, Fujitsu and NVHPC compilers."""
|
||||
values for libtool variables.
|
||||
|
||||
# Exit early if we are required not to patch libtool
|
||||
The generated libtool script supports mixed compilers through tags:
|
||||
``libtool --tag=CC/CXX/FC/...``. For each tag there is a block with variables,
|
||||
which defines what flags to pass to the compiler. The default variables (which
|
||||
are used by the default tag CC) are set in a block enclosed by
|
||||
``# ### {BEGIN,END} LIBTOOL CONFIG``. For non-default tags, there are
|
||||
corresponding blocks ``# ### {BEGIN,END} LIBTOOL TAG CONFIG: {CXX,FC,F77}`` at
|
||||
the end of the file (after the exit command). libtool evals these blocks.
|
||||
Whenever we need to update variables that the configure script got wrong
|
||||
(for example because it did not recognize the compiler), we should properly scope
|
||||
those changes to these tags/blocks so they only apply to the compiler we care
|
||||
about. Below, ``start_at`` and ``stop_at`` are used for that."""
|
||||
|
||||
# Exit early if we are required not to patch libtool:
|
||||
if not self.patch_libtool:
|
||||
return
|
||||
|
||||
for libtool_path in fs.find(self.build_directory, "libtool", recursive=True):
|
||||
self._patch_libtool(libtool_path)
|
||||
x = fs.FileFilter(
|
||||
*filter(fs.is_exe, fs.find(self.build_directory, "libtool", recursive=True))
|
||||
)
|
||||
|
||||
def _patch_libtool(self, libtool_path):
|
||||
if (
|
||||
self.spec.satisfies("%arm")
|
||||
or self.spec.satisfies("%clang")
|
||||
or self.spec.satisfies("%fj")
|
||||
or self.spec.satisfies("%nvhpc")
|
||||
):
|
||||
fs.filter_file('wl=""\n', 'wl="-Wl,"\n', libtool_path)
|
||||
fs.filter_file(
|
||||
'pic_flag=""\n', 'pic_flag="{0}"\n'.format(self.compiler.cc_pic_flag), libtool_path
|
||||
# Exit early if there is nothing to patch:
|
||||
if not x.filenames:
|
||||
return
|
||||
|
||||
markers = {"cc": "LIBTOOL CONFIG"}
|
||||
for tag in ["cxx", "fc", "f77"]:
|
||||
markers[tag] = "LIBTOOL TAG CONFIG: {0}".format(tag.upper())
|
||||
|
||||
# Replace empty linker flag prefixes:
|
||||
if self.compiler.name == "nag":
|
||||
# Nag is mixed with gcc and g++, which are recognized correctly.
|
||||
# Therefore, we change only Fortran values:
|
||||
for tag in ["fc", "f77"]:
|
||||
marker = markers[tag]
|
||||
x.filter(
|
||||
regex='^wl=""$',
|
||||
repl='wl="{0}"'.format(self.compiler.linker_arg),
|
||||
start_at="# ### BEGIN {0}".format(marker),
|
||||
stop_at="# ### END {0}".format(marker),
|
||||
)
|
||||
else:
|
||||
x.filter(regex='^wl=""$', repl='wl="{0}"'.format(self.compiler.linker_arg))
|
||||
|
||||
# Replace empty PIC flag values:
|
||||
for cc, marker in markers.items():
|
||||
x.filter(
|
||||
regex='^pic_flag=""$',
|
||||
repl='pic_flag="{0}"'.format(getattr(self.compiler, "{0}_pic_flag".format(cc))),
|
||||
start_at="# ### BEGIN {0}".format(marker),
|
||||
stop_at="# ### END {0}".format(marker),
|
||||
)
|
||||
if self.spec.satisfies("%fj"):
|
||||
fs.filter_file("-nostdlib", "", libtool_path)
|
||||
|
||||
# Other compiler-specific patches:
|
||||
if self.compiler.name == "fj":
|
||||
x.filter(regex="-nostdlib", repl="", string=True)
|
||||
rehead = r"/\S*/"
|
||||
objfile = [
|
||||
for o in [
|
||||
"fjhpctag.o",
|
||||
"fjcrt0.o",
|
||||
"fjlang08.o",
|
||||
@@ -297,9 +363,86 @@ def _patch_libtool(self, libtool_path):
|
||||
"crti.o",
|
||||
"crtbeginS.o",
|
||||
"crtendS.o",
|
||||
]
|
||||
for o in objfile:
|
||||
fs.filter_file(rehead + o, "", libtool_path)
|
||||
]:
|
||||
x.filter(regex=(rehead + o), repl="", string=True)
|
||||
elif self.compiler.name == "dpcpp":
|
||||
# Hack to filter out spurious predep_objects when building with Intel dpcpp
|
||||
# (see https://github.com/spack/spack/issues/32863):
|
||||
x.filter(regex=r"^(predep_objects=.*)/tmp/conftest-[0-9A-Fa-f]+\.o", repl=r"\1")
|
||||
x.filter(regex=r"^(predep_objects=.*)/tmp/a-[0-9A-Fa-f]+\.o", repl=r"\1")
|
||||
elif self.compiler.name == "nag":
|
||||
for tag in ["fc", "f77"]:
|
||||
marker = markers[tag]
|
||||
start_at = "# ### BEGIN {0}".format(marker)
|
||||
stop_at = "# ### END {0}".format(marker)
|
||||
# Libtool 2.4.2 does not know the shared flag:
|
||||
x.filter(
|
||||
regex=r"\$CC -shared",
|
||||
repl=r"\$CC -Wl,-shared",
|
||||
string=True,
|
||||
start_at=start_at,
|
||||
stop_at=stop_at,
|
||||
)
|
||||
# Libtool does not know how to inject whole archives
|
||||
# (e.g. https://github.com/pmodels/mpich/issues/4358):
|
||||
x.filter(
|
||||
regex=r'^whole_archive_flag_spec="\\\$({?wl}?)--whole-archive'
|
||||
r'\\\$convenience \\\$\1--no-whole-archive"$',
|
||||
repl=r'whole_archive_flag_spec="\$\1--whole-archive'
|
||||
r"\`for conv in \$convenience\\\\\"\\\\\"; do test -n \\\\\"\$conv\\\\\" && "
|
||||
r"new_convenience=\\\\\"\$new_convenience,\$conv\\\\\"; done; "
|
||||
r'func_echo_all \\\\\"\$new_convenience\\\\\"\` \$\1--no-whole-archive"',
|
||||
start_at=start_at,
|
||||
stop_at=stop_at,
|
||||
)
|
||||
# The compiler requires special treatment in certain cases:
|
||||
x.filter(
|
||||
regex=r"^(with_gcc=.*)$",
|
||||
repl="\\1\n\n# Is the compiler the NAG compiler?\nwith_nag=yes",
|
||||
start_at=start_at,
|
||||
stop_at=stop_at,
|
||||
)
|
||||
|
||||
# Disable the special treatment for gcc and g++:
|
||||
for tag in ["cc", "cxx"]:
|
||||
marker = markers[tag]
|
||||
x.filter(
|
||||
regex=r"^(with_gcc=.*)$",
|
||||
repl="\\1\n\n# Is the compiler the NAG compiler?\nwith_nag=no",
|
||||
start_at="# ### BEGIN {0}".format(marker),
|
||||
stop_at="# ### END {0}".format(marker),
|
||||
)
|
||||
|
||||
# The compiler does not support -pthread flag, which might come
|
||||
# from the inherited linker flags. We prepend the flag with -Wl,
|
||||
# before using it:
|
||||
x.filter(
|
||||
regex=r"^(\s*)(for tmp_inherited_linker_flag in \$tmp_inherited_linker_flags; "
|
||||
r"do\s*)$",
|
||||
repl='\\1if test "x$with_nag" = xyes; then\n'
|
||||
"\\1 revert_nag_pthread=$tmp_inherited_linker_flags\n"
|
||||
"\\1 tmp_inherited_linker_flags="
|
||||
"`$ECHO \"$tmp_inherited_linker_flags\" | $SED 's% -pthread% -Wl,-pthread%g'`\n"
|
||||
'\\1 test x"$revert_nag_pthread" = x"$tmp_inherited_linker_flags" && '
|
||||
"revert_nag_pthread=no || revert_nag_pthread=yes\n"
|
||||
"\\1fi\n\\1\\2",
|
||||
start_at='if test -n "$inherited_linker_flags"; then',
|
||||
stop_at='case " $new_inherited_linker_flags " in',
|
||||
)
|
||||
# And revert the modification to produce '*.la' files that can be
|
||||
# used with gcc (normally, we do not install the files but they can
|
||||
# still be used during the building):
|
||||
start_at = '# Time to change all our "foo.ltframework" stuff back to "-framework foo"'
|
||||
stop_at = "# installed libraries to the beginning of the library search list"
|
||||
x.filter(
|
||||
regex=r"(\s*)(# move library search paths that coincide with paths to not "
|
||||
r"yet\s*)$",
|
||||
repl='\\1test x"$with_nag$revert_nag_pthread" = xyesyes &&\n'
|
||||
'\\1 new_inherited_linker_flags=`$ECHO " $new_inherited_linker_flags" | '
|
||||
"$SED 's% -Wl,-pthread% -pthread%g'`\n\\1\\2",
|
||||
start_at=start_at,
|
||||
stop_at=stop_at,
|
||||
)
|
||||
|
||||
@property
|
||||
def configure_directory(self):
|
||||
|
||||
@@ -19,7 +19,6 @@
|
||||
import spack.build_environment
|
||||
from spack.directives import conflicts, depends_on, variant
|
||||
from spack.package_base import InstallError, PackageBase, run_after
|
||||
from spack.util.path import convert_to_posix_path
|
||||
|
||||
# Regex to extract the primary generator from the CMake generator
|
||||
# string.
|
||||
@@ -176,7 +175,7 @@ def _std_args(pkg):
|
||||
args = [
|
||||
"-G",
|
||||
generator,
|
||||
define("CMAKE_INSTALL_PREFIX", convert_to_posix_path(pkg.prefix)),
|
||||
define("CMAKE_INSTALL_PREFIX", pkg.prefix),
|
||||
define("CMAKE_BUILD_TYPE", build_type),
|
||||
define("BUILD_TESTING", pkg.run_tests),
|
||||
]
|
||||
|
||||
@@ -41,6 +41,9 @@ class CudaPackage(PackageBase):
|
||||
"75",
|
||||
"80",
|
||||
"86",
|
||||
"87",
|
||||
"89",
|
||||
"90",
|
||||
)
|
||||
|
||||
# FIXME: keep cuda and cuda_arch separate to make usage easier until
|
||||
@@ -100,6 +103,11 @@ def cuda_flags(arch_list):
|
||||
depends_on("cuda@11.0:", when="cuda_arch=80")
|
||||
depends_on("cuda@11.1:", when="cuda_arch=86")
|
||||
|
||||
depends_on("cuda@11.4:", when="cuda_arch=87")
|
||||
|
||||
depends_on("cuda@11.8:", when="cuda_arch=89")
|
||||
depends_on("cuda@11.8:", when="cuda_arch=90")
|
||||
|
||||
# From the NVIDIA install guide we know of conflicts for particular
|
||||
# platforms (linux, darwin), architectures (x86, powerpc) and compilers
|
||||
# (gcc, clang). We don't restrict %gcc and %clang conflicts to
|
||||
@@ -128,10 +136,11 @@ def cuda_flags(arch_list):
|
||||
conflicts("%gcc@10:", when="+cuda ^cuda@:11.0")
|
||||
conflicts("%gcc@11:", when="+cuda ^cuda@:11.4.0")
|
||||
conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
|
||||
conflicts("%gcc@12:", when="+cuda ^cuda@:11.7")
|
||||
conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
|
||||
conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
|
||||
conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
|
||||
conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
|
||||
conflicts("%clang@15:", when="+cuda ^cuda@:11.8")
|
||||
|
||||
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
|
||||
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
|
||||
|
||||
@@ -75,7 +75,7 @@ class MesonPackage(PackageBase):
|
||||
@property
|
||||
def archive_files(self):
|
||||
"""Files to archive for packages based on Meson"""
|
||||
return [os.path.join(self.build_directory, "meson-logs/meson-log.txt")]
|
||||
return [os.path.join(self.build_directory, "meson-logs", "meson-log.txt")]
|
||||
|
||||
@property
|
||||
def root_mesonlists_dir(self):
|
||||
@@ -138,13 +138,21 @@ def flags_to_build_system_args(self, flags):
|
||||
# Has to be dynamic attribute due to caching
|
||||
setattr(self, "meson_flag_args", [])
|
||||
|
||||
@property
|
||||
def build_dirname(self):
|
||||
"""Returns the directory name to use when building the package
|
||||
|
||||
:return: name of the subdirectory for building the package
|
||||
"""
|
||||
return "spack-build-%s" % self.spec.dag_hash(7)
|
||||
|
||||
@property
|
||||
def build_directory(self):
|
||||
"""Returns the directory to use when building the package
|
||||
|
||||
:return: directory where to build the package
|
||||
"""
|
||||
return os.path.join(self.stage.source_path, "spack-build")
|
||||
return os.path.join(self.stage.path, self.build_dirname)
|
||||
|
||||
def meson_args(self):
|
||||
"""Produces a list containing all the arguments that must be passed to
|
||||
|
||||
@@ -33,7 +33,7 @@ class PythonPackage(PackageBase):
|
||||
#: Package name, version, and extension on PyPI
|
||||
pypi = None # type: Optional[str]
|
||||
|
||||
maintainers = ["adamjstewart"]
|
||||
maintainers = ["adamjstewart", "pradyunsg"]
|
||||
|
||||
# Default phases
|
||||
phases = ["install"]
|
||||
@@ -138,12 +138,28 @@ def import_modules(self):
|
||||
path.replace(root + os.sep, "", 1).replace(".py", "").replace("/", ".")
|
||||
)
|
||||
|
||||
modules = [mod for mod in modules if re.match("[a-zA-Z0-9._]+$", mod)]
|
||||
modules = [
|
||||
mod
|
||||
for mod in modules
|
||||
if re.match("[a-zA-Z0-9._]+$", mod) and not any(map(mod.startswith, self.skip_modules))
|
||||
]
|
||||
|
||||
tty.debug("Detected the following modules: {0}".format(modules))
|
||||
|
||||
return modules
|
||||
|
||||
@property
|
||||
def skip_modules(self):
|
||||
"""Names of modules that should be skipped when running tests.
|
||||
|
||||
These are a subset of import_modules. If a module has submodules,
|
||||
they are skipped as well (meaning a.b is skipped if a is contained).
|
||||
|
||||
Returns:
|
||||
list: list of strings of module names
|
||||
"""
|
||||
return []
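A toy example (standard library only, not part of the patch) of the submodule filtering that skip_modules enables:

# Anything under a skipped parent module is dropped before the import tests run.
modules = ["numpy", "numpy.distutils", "numpy.linalg", "scipy"]
skip = ["numpy.distutils"]
kept = [m for m in modules if not any(m.startswith(s) for s in skip)]
# kept == ["numpy", "numpy.linalg", "scipy"]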
|
||||
|
||||
@property
|
||||
def build_directory(self):
|
||||
"""The root directory of the Python package.
|
||||
@@ -227,8 +243,8 @@ def headers(self):
|
||||
"""Discover header files in platlib."""
|
||||
|
||||
# Headers may be in either location
|
||||
include = inspect.getmodule(self).include
|
||||
platlib = inspect.getmodule(self).platlib
|
||||
include = self.prefix.join(self.spec["python"].package.include)
|
||||
platlib = self.prefix.join(self.spec["python"].package.platlib)
|
||||
headers = find_all_headers(include) + find_all_headers(platlib)
|
||||
|
||||
if headers:
|
||||
@@ -243,7 +259,7 @@ def libs(self):
|
||||
|
||||
# Remove py- prefix in package name
|
||||
library = "lib" + self.spec.name[3:].replace("-", "?")
|
||||
root = inspect.getmodule(self).platlib
|
||||
root = self.prefix.join(self.spec["python"].package.platlib)
|
||||
|
||||
for shared in [True, False]:
|
||||
libs = find_libraries(library, root, shared=shared, recursive=True)
|
||||
|
||||
@@ -4,13 +4,17 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import base64
|
||||
import codecs
|
||||
import copy
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import time
|
||||
import zipfile
|
||||
|
||||
from six import iteritems
|
||||
@@ -20,6 +24,7 @@
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.lang import memoized
|
||||
|
||||
import spack
|
||||
import spack.binary_distribution as bindist
|
||||
@@ -33,9 +38,13 @@
|
||||
import spack.util.executable as exe
|
||||
import spack.util.gpg as gpg_util
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.util.url as url_util
|
||||
import spack.util.web as web_util
|
||||
from spack.error import SpackError
|
||||
from spack.reporters.cdash import CDash
|
||||
from spack.reporters.cdash import build_stamp as cdash_build_stamp
|
||||
from spack.spec import Spec
|
||||
from spack.util.pattern import Bunch
|
||||
|
||||
JOB_RETRY_CONDITIONS = [
|
||||
"always",
|
||||
@@ -60,69 +69,6 @@ def __exit__(self, exc_type, exc_value, exc_traceback):
|
||||
return False
|
||||
|
||||
|
||||
def _create_buildgroup(opener, headers, url, project, group_name, group_type):
|
||||
data = {"newbuildgroup": group_name, "project": project, "type": group_type}
|
||||
|
||||
enc_data = json.dumps(data).encode("utf-8")
|
||||
|
||||
request = Request(url, data=enc_data, headers=headers)
|
||||
|
||||
response = opener.open(request)
|
||||
response_code = response.getcode()
|
||||
|
||||
if response_code != 200 and response_code != 201:
|
||||
msg = "Creating buildgroup failed (response code = {0}".format(response_code)
|
||||
tty.warn(msg)
|
||||
return None
|
||||
|
||||
response_text = response.read()
|
||||
response_json = json.loads(response_text)
|
||||
build_group_id = response_json["id"]
|
||||
|
||||
return build_group_id
|
||||
|
||||
|
||||
def _populate_buildgroup(job_names, group_name, project, site, credentials, cdash_url):
|
||||
url = "{0}/api/v1/buildgroup.php".format(cdash_url)
|
||||
|
||||
headers = {
|
||||
"Authorization": "Bearer {0}".format(credentials),
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
|
||||
opener = build_opener(HTTPHandler)
|
||||
|
||||
parent_group_id = _create_buildgroup(opener, headers, url, project, group_name, "Daily")
|
||||
group_id = _create_buildgroup(
|
||||
opener, headers, url, project, "Latest {0}".format(group_name), "Latest"
|
||||
)
|
||||
|
||||
if not parent_group_id or not group_id:
|
||||
msg = "Failed to create or retrieve buildgroups for {0}".format(group_name)
|
||||
tty.warn(msg)
|
||||
return
|
||||
|
||||
data = {
|
||||
"project": project,
|
||||
"buildgroupid": group_id,
|
||||
"dynamiclist": [
|
||||
{"match": name, "parentgroupid": parent_group_id, "site": site} for name in job_names
|
||||
],
|
||||
}
|
||||
|
||||
enc_data = json.dumps(data).encode("utf-8")
|
||||
|
||||
request = Request(url, data=enc_data, headers=headers)
|
||||
request.get_method = lambda: "PUT"
|
||||
|
||||
response = opener.open(request)
|
||||
response_code = response.getcode()
|
||||
|
||||
if response_code != 200:
|
||||
msg = "Error response code ({0}) in _populate_buildgroup".format(response_code)
|
||||
tty.warn(msg)
|
||||
|
||||
|
||||
def _is_main_phase(phase_name):
|
||||
return True if phase_name == "specs" else False
|
||||
|
||||
@@ -180,12 +126,6 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
|
||||
return format_str.format(*format_args)
|
||||
|
||||
|
||||
def _get_cdash_build_name(spec, build_group):
|
||||
return "{0}@{1}%{2} arch={3} ({4})".format(
|
||||
spec.name, spec.version, spec.compiler, spec.architecture, build_group
|
||||
)
|
||||
|
||||
|
||||
def _remove_reserved_tags(tags):
|
||||
"""Convenience function to strip reserved tags from jobs"""
|
||||
return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]
|
||||
@@ -458,6 +398,14 @@ def _spec_matches(spec, match_string):
|
||||
return spec.satisfies(match_string)
|
||||
|
||||
|
||||
def _remove_attributes(src_dict, dest_dict):
|
||||
if "tags" in src_dict and "tags" in dest_dict:
|
||||
# For 'tags', we remove any tags that are listed for removal
|
||||
for tag in src_dict["tags"]:
|
||||
while tag in dest_dict["tags"]:
|
||||
dest_dict["tags"].remove(tag)
|
||||
|
||||
|
||||
def _copy_attributes(attrs_list, src_dict, dest_dict):
|
||||
for runner_attr in attrs_list:
|
||||
if runner_attr in src_dict:
|
||||
@@ -491,19 +439,23 @@ def _find_matching_config(spec, gitlab_ci):
|
||||
|
||||
_copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)
|
||||
|
||||
ci_mappings = gitlab_ci["mappings"]
|
||||
for ci_mapping in ci_mappings:
|
||||
matched = False
|
||||
only_first = gitlab_ci.get("match_behavior", "first") == "first"
|
||||
for ci_mapping in gitlab_ci["mappings"]:
|
||||
for match_string in ci_mapping["match"]:
|
||||
if _spec_matches(spec, match_string):
|
||||
matched = True
|
||||
if "remove-attributes" in ci_mapping:
|
||||
_remove_attributes(ci_mapping["remove-attributes"], runner_attributes)
|
||||
if "runner-attributes" in ci_mapping:
|
||||
_copy_attributes(
|
||||
overridable_attrs, ci_mapping["runner-attributes"], runner_attributes
|
||||
)
|
||||
return runner_attributes
|
||||
else:
|
||||
return None
|
||||
break
|
||||
if matched and only_first:
|
||||
break
|
||||
|
||||
return runner_attributes
|
||||
return runner_attributes if matched else None
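For orientation, a hypothetical gitlab-ci dictionary of the shape this function consumes; the "merge" value is an assumption used only to show the non-first-match behaviour:

gitlab_ci = {
    "match_behavior": "merge",  # anything other than "first" keeps scanning mappings
    "mappings": [
        {"match": ["%gcc"], "runner-attributes": {"tags": ["x86_64"]}},
        {"match": ["target=aarch64"], "runner-attributes": {"tags": ["aarch64"]}},
    ],
}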
|
||||
|
||||
|
||||
def _pkg_name_from_spec_label(spec_label):
|
||||
@@ -672,27 +624,14 @@ def generate_gitlab_ci_yaml(
|
||||
|
||||
gitlab_ci = yaml_root["gitlab-ci"]
|
||||
|
||||
build_group = None
|
||||
enable_cdash_reporting = False
|
||||
cdash_auth_token = None
|
||||
cdash_handler = CDashHandler(yaml_root.get("cdash")) if "cdash" in yaml_root else None
|
||||
build_group = cdash_handler.build_group if cdash_handler else None
|
||||
|
||||
if "cdash" in yaml_root:
|
||||
enable_cdash_reporting = True
|
||||
ci_cdash = yaml_root["cdash"]
|
||||
build_group = ci_cdash["build-group"]
|
||||
cdash_url = ci_cdash["url"]
|
||||
cdash_project = ci_cdash["project"]
|
||||
cdash_site = ci_cdash["site"]
|
||||
|
||||
if "SPACK_CDASH_AUTH_TOKEN" in os.environ:
|
||||
tty.verbose("Using CDash auth token from environment")
|
||||
cdash_auth_token = os.environ.get("SPACK_CDASH_AUTH_TOKEN")
|
||||
|
||||
prune_untouched_packages = os.environ.get("SPACK_PRUNE_UNTOUCHED", None)
|
||||
if prune_untouched_packages:
|
||||
prune_untouched_packages = False
|
||||
spack_prune_untouched = os.environ.get("SPACK_PRUNE_UNTOUCHED", None)
|
||||
if spack_prune_untouched is not None and spack_prune_untouched.lower() == "true":
|
||||
# Requested to prune untouched packages, but assume we won't do that
|
||||
# unless we're actually in a git repo.
|
||||
prune_untouched_packages = False
|
||||
rev1, rev2 = get_change_revisions()
|
||||
tty.debug("Got following revisions: rev1={0}, rev2={1}".format(rev1, rev2))
|
||||
if rev1 and rev2:
|
||||
@@ -708,6 +647,14 @@ def generate_gitlab_ci_yaml(
|
||||
for s in affected_specs:
|
||||
tty.debug(" {0}".format(s.name))
|
||||
|
||||
# Allow overriding --prune-dag cli opt with environment variable
|
||||
prune_dag_override = os.environ.get("SPACK_PRUNE_UP_TO_DATE", None)
|
||||
if prune_dag_override is not None:
|
||||
prune_dag = True if prune_dag_override.lower() == "true" else False
|
||||
|
||||
# If we are not doing any kind of pruning, we are rebuilding everything
|
||||
rebuild_everything = not prune_dag and not prune_untouched_packages
|
||||
|
||||
# Downstream jobs will "need" (depend on, for both scheduling and
|
||||
# artifacts, which include spack.lock file) this pipeline generation
|
||||
# job by both name and pipeline id. If those environment variables
|
||||
@@ -720,8 +667,6 @@ def generate_gitlab_ci_yaml(
|
||||
# Values: "spack_pull_request", "spack_protected_branch", or not set
|
||||
spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE", None)
|
||||
|
||||
spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None)
|
||||
|
||||
if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
|
||||
tty.die("spack ci generate requires an env containing a mirror")
|
||||
|
||||
@@ -729,6 +674,12 @@ def generate_gitlab_ci_yaml(
|
||||
mirror_urls = [url for url in ci_mirrors.values()]
|
||||
remote_mirror_url = mirror_urls[0]
|
||||
|
||||
spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None)
|
||||
if spack_buildcache_copy:
|
||||
buildcache_copies = {}
|
||||
buildcache_copy_src_prefix = remote_mirror_override or remote_mirror_url
|
||||
buildcache_copy_dest_prefix = spack_buildcache_copy
|
||||
|
||||
# Check for a list of "known broken" specs that we should not bother
|
||||
# trying to build.
|
||||
broken_specs_url = ""
|
||||
@@ -820,6 +771,7 @@ def generate_gitlab_ci_yaml(
|
||||
|
||||
job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
|
||||
job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
|
||||
job_test_dir = os.path.join(pipeline_artifacts_dir, "tests")
|
||||
local_mirror_dir = os.path.join(pipeline_artifacts_dir, "mirror")
|
||||
user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data")
|
||||
|
||||
@@ -833,7 +785,8 @@ def generate_gitlab_ci_yaml(
|
||||
rel_concrete_env_dir = os.path.relpath(concrete_env_dir, ci_project_dir)
|
||||
rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
|
||||
rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir)
|
||||
rel_local_mirror_dir = os.path.relpath(local_mirror_dir, ci_project_dir)
|
||||
rel_job_test_dir = os.path.relpath(job_test_dir, ci_project_dir)
|
||||
rel_local_mirror_dir = os.path.join(local_mirror_dir, ci_project_dir)
|
||||
rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir)
|
||||
|
||||
# Speed up staging by first fetching binary indices from all mirrors
|
||||
@@ -934,7 +887,7 @@ def generate_gitlab_ci_yaml(
|
||||
# For spack pipelines "public" and "protected" are reserved tags
|
||||
tags = _remove_reserved_tags(tags)
|
||||
if spack_pipeline_type == "spack_protected_branch":
|
||||
tags.extend(["aws", "protected"])
|
||||
tags.extend(["protected"])
|
||||
elif spack_pipeline_type == "spack_pull_request":
|
||||
tags.extend(["public"])
|
||||
|
||||
@@ -1090,9 +1043,37 @@ def generate_gitlab_ci_yaml(
|
||||
continue
|
||||
|
||||
if broken_spec_urls is not None and release_spec_dag_hash in broken_spec_urls:
|
||||
known_broken_specs_encountered.append(
|
||||
"{0} ({1})".format(release_spec, release_spec_dag_hash)
|
||||
)
|
||||
known_broken_specs_encountered.append(release_spec_dag_hash)
|
||||
|
||||
# Only keep track of these if we are copying rebuilt cache entries
|
||||
if spack_buildcache_copy:
|
||||
# TODO: This assumes signed version of the spec
|
||||
buildcache_copies[release_spec_dag_hash] = [
|
||||
{
|
||||
"src": url_util.join(
|
||||
buildcache_copy_src_prefix,
|
||||
bindist.build_cache_relative_path(),
|
||||
bindist.tarball_name(release_spec, ".spec.json.sig"),
|
||||
),
|
||||
"dest": url_util.join(
|
||||
buildcache_copy_dest_prefix,
|
||||
bindist.build_cache_relative_path(),
|
||||
bindist.tarball_name(release_spec, ".spec.json.sig"),
|
||||
),
|
||||
},
|
||||
{
|
||||
"src": url_util.join(
|
||||
buildcache_copy_src_prefix,
|
||||
bindist.build_cache_relative_path(),
|
||||
bindist.tarball_path_name(release_spec, ".spack"),
|
||||
),
|
||||
"dest": url_util.join(
|
||||
buildcache_copy_dest_prefix,
|
||||
bindist.build_cache_relative_path(),
|
||||
bindist.tarball_path_name(release_spec, ".spack"),
|
||||
),
|
||||
},
|
||||
]
|
||||
|
||||
if artifacts_root:
|
||||
job_dependencies.append(
|
||||
@@ -1101,14 +1082,23 @@ def generate_gitlab_ci_yaml(
|
||||
|
||||
job_vars["SPACK_SPEC_NEEDS_REBUILD"] = str(rebuild_spec)
|
||||
|
||||
if enable_cdash_reporting:
|
||||
cdash_build_name = _get_cdash_build_name(release_spec, build_group)
|
||||
all_job_names.append(cdash_build_name)
|
||||
job_vars["SPACK_CDASH_BUILD_NAME"] = cdash_build_name
|
||||
if cdash_handler:
|
||||
cdash_handler.current_spec = release_spec
|
||||
build_name = cdash_handler.build_name
|
||||
all_job_names.append(build_name)
|
||||
job_vars["SPACK_CDASH_BUILD_NAME"] = build_name
|
||||
|
||||
build_stamp = cdash_handler.build_stamp
|
||||
job_vars["SPACK_CDASH_BUILD_STAMP"] = build_stamp
|
||||
|
||||
variables.update(job_vars)
|
||||
|
||||
artifact_paths = [rel_job_log_dir, rel_job_repro_dir, rel_user_artifacts_dir]
|
||||
artifact_paths = [
|
||||
rel_job_log_dir,
|
||||
rel_job_repro_dir,
|
||||
rel_job_test_dir,
|
||||
rel_user_artifacts_dir,
|
||||
]
|
||||
|
||||
if enable_artifacts_buildcache:
|
||||
bc_root = os.path.join(local_mirror_dir, "build_cache")
|
||||
@@ -1176,11 +1166,9 @@ def generate_gitlab_ci_yaml(
|
||||
)
|
||||
|
||||
# Use "all_job_names" to populate the build group for this set
|
||||
if enable_cdash_reporting and cdash_auth_token:
|
||||
if cdash_handler and cdash_handler.auth_token:
|
||||
try:
|
||||
_populate_buildgroup(
|
||||
all_job_names, build_group, cdash_project, cdash_site, cdash_auth_token, cdash_url
|
||||
)
|
||||
cdash_handler.populate_buildgroup(all_job_names)
|
||||
except (SpackError, HTTPError, URLError) as err:
|
||||
tty.warn("Problem populating buildgroup: {0}".format(err))
|
||||
else:
|
||||
@@ -1264,32 +1252,6 @@ def generate_gitlab_ci_yaml(
|
||||
|
||||
output_object["sign-pkgs"] = signing_job
|
||||
|
||||
if spack_buildcache_copy:
|
||||
# Generate a job to copy the contents from wherever the builds are getting
|
||||
# pushed to the url specified in the "SPACK_BUILDCACHE_COPY" environment
|
||||
# variable.
|
||||
src_url = remote_mirror_override or remote_mirror_url
|
||||
dest_url = spack_buildcache_copy
|
||||
|
||||
stage_names.append("stage-copy-buildcache")
|
||||
copy_job = {
|
||||
"stage": "stage-copy-buildcache",
|
||||
"tags": ["spack", "public", "medium", "aws", "x86_64"],
|
||||
"image": "ghcr.io/spack/python-aws-bash:0.0.1",
|
||||
"when": "on_success",
|
||||
"interruptible": True,
|
||||
"retry": service_job_retries,
|
||||
"script": [
|
||||
". ./share/spack/setup-env.sh",
|
||||
"spack --version",
|
||||
"aws s3 sync --exclude *index.json* --exclude *pgp* {0} {1}".format(
|
||||
src_url, dest_url
|
||||
),
|
||||
],
|
||||
}
|
||||
|
||||
output_object["copy-mirror"] = copy_job
|
||||
|
||||
if rebuild_index_enabled:
|
||||
# Add a final job to regenerate the index
|
||||
stage_names.append("stage-rebuild-index")
|
||||
@@ -1341,8 +1303,12 @@ def generate_gitlab_ci_yaml(
|
||||
"SPACK_REMOTE_MIRROR_URL": remote_mirror_url,
|
||||
"SPACK_JOB_LOG_DIR": rel_job_log_dir,
|
||||
"SPACK_JOB_REPRO_DIR": rel_job_repro_dir,
|
||||
"SPACK_JOB_TEST_DIR": rel_job_test_dir,
|
||||
"SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir,
|
||||
"SPACK_PIPELINE_TYPE": str(spack_pipeline_type),
|
||||
"SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
|
||||
"SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag),
|
||||
"SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
|
||||
}
|
||||
|
||||
if remote_mirror_override:
|
||||
@@ -1352,6 +1318,21 @@ def generate_gitlab_ci_yaml(
|
||||
if spack_stack_name:
|
||||
output_object["variables"]["SPACK_CI_STACK_NAME"] = spack_stack_name
|
||||
|
||||
if spack_buildcache_copy:
|
||||
# Write out the file describing specs that should be copied
|
||||
copy_specs_dir = os.path.join(pipeline_artifacts_dir, "specs_to_copy")
|
||||
|
||||
if not os.path.exists(copy_specs_dir):
|
||||
os.makedirs(copy_specs_dir)
|
||||
|
||||
copy_specs_file = os.path.join(
|
||||
copy_specs_dir,
|
||||
"copy_{}_specs.json".format(spack_stack_name if spack_stack_name else "rebuilt"),
|
||||
)
|
||||
|
||||
with open(copy_specs_file, "w") as fd:
|
||||
fd.write(json.dumps(buildcache_copies))
|
||||
|
||||
sorted_output = {}
|
||||
for output_key, output_value in sorted(output_object.items()):
|
||||
sorted_output[output_key] = output_value
|
||||
@@ -1385,13 +1366,11 @@ def generate_gitlab_ci_yaml(
|
||||
sorted_output = {"no-specs-to-rebuild": noop_job}
|
||||
|
||||
if known_broken_specs_encountered:
|
||||
error_msg = (
|
||||
"Pipeline generation failed due to the presence of the "
|
||||
"following specs that are known to be broken in develop:\n"
|
||||
)
|
||||
for broken_spec in known_broken_specs_encountered:
|
||||
error_msg += "* {0}\n".format(broken_spec)
|
||||
tty.die(error_msg)
|
||||
tty.error("This pipeline generated hashes known to be broken on develop:")
|
||||
display_broken_spec_messages(broken_specs_url, known_broken_specs_encountered)
|
||||
|
||||
if not rebuild_everything:
|
||||
sys.exit(1)
|
||||
|
||||
with open(output_file, "w") as outf:
|
||||
outf.write(syaml.dump_config(sorted_output, default_flow_style=True))
|
||||
@@ -1609,33 +1588,83 @@ def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
|
||||
raise inst
|
||||
|
||||
|
||||
def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
|
||||
"""Looks for spack-build-out.txt in the stage directory of the given
|
||||
job_spec, and attempts to copy the file into the directory given
|
||||
by job_log_dir.
|
||||
|
||||
Arguments:
|
||||
|
||||
job_spec (spack.spec.Spec): Spec associated with spack install log
|
||||
job_log_dir (str): Path into which build log should be copied
|
||||
def remove_other_mirrors(mirrors_to_keep, scope=None):
    """Remove all mirrors from the given config scope, the exceptions being
    any listed in mirrors_to_keep, which is a list of mirror URLs.
    """
    mirrors_to_remove = []
    for name, mirror_url in spack.config.get("mirrors", scope=scope).items():
        if mirror_url not in mirrors_to_keep:
            mirrors_to_remove.append(name)

    for mirror_name in mirrors_to_remove:
        spack.mirror.remove(mirror_name, scope)
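A usage sketch, with hypothetical mirror URLs, of how a rebuild-everything pipeline keeps only its own mirrors:

pipeline_mirrors = [
    "s3://spack-binaries/develop",             # hypothetical remote mirror for the stack
    "s3://spack-binaries-prs/pipeline-cache",  # hypothetical temporary per-pipeline storage
]
remove_other_mirrors(pipeline_mirrors, scope=spack.config.default_modify_scope())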
|
||||
|
||||
|
||||
def copy_files_to_artifacts(src, artifacts_dir):
    """
    Copy file(s) to the given artifacts directory

    Parameters:
        src (str): the glob-friendly path expression for the file(s) to copy
        artifacts_dir (str): the destination directory
    """
    try:
        fs.copy(src, artifacts_dir)
    except Exception as err:
        msg = "Unable to copy files ({0}) to artifacts {1} due to exception: {2}".format(
            src, artifacts_dir, str(err)
        )
        tty.error(msg)
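A minimal usage sketch with hypothetical paths; since fs.copy accepts glob patterns, a wildcard source copies every matching file:

# Copy every build output log from a (hypothetical) stage directory into the artifacts dir.
copy_files_to_artifacts("/tmp/stage/zlib-*/spack-build-*.txt", "/builds/project/jobs_scratch_dir/logs")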
|
||||
|
||||
|
||||
def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
|
||||
"""Copy selected build stage file(s) to the given artifacts directory
|
||||
|
||||
Looks for spack-build-out.txt in the stage directory of the given
|
||||
job_spec, and attempts to copy the file into the directory given
|
||||
by job_log_dir.
|
||||
|
||||
Parameters:
|
||||
job_spec (spack.spec.Spec): spec associated with spack install log
|
||||
job_log_dir (str): path into which build log should be copied
|
||||
"""
|
||||
tty.debug("job spec: {0}".format(job_spec))
|
||||
if not job_spec:
|
||||
msg = "Cannot copy stage logs: job spec ({0}) is required"
|
||||
tty.error(msg.format(job_spec))
|
||||
return
|
||||
|
||||
try:
|
||||
pkg_cls = spack.repo.path.get_pkg_class(job_spec.name)
|
||||
job_pkg = pkg_cls(job_spec)
|
||||
tty.debug("job package: {0.fullname}".format(job_pkg))
|
||||
stage_dir = job_pkg.stage.path
|
||||
tty.debug("stage dir: {0}".format(stage_dir))
|
||||
build_out_src = os.path.join(stage_dir, "spack-build-out.txt")
|
||||
build_out_dst = os.path.join(job_log_dir, "spack-build-out.txt")
|
||||
tty.debug(
|
||||
"Copying build log ({0}) to artifacts ({1})".format(build_out_src, build_out_dst)
|
||||
)
|
||||
shutil.copyfile(build_out_src, build_out_dst)
|
||||
except Exception as inst:
|
||||
msg = (
|
||||
"Unable to copy build logs from stage to artifacts " "due to exception: {0}"
|
||||
).format(inst)
|
||||
tty.error(msg)
|
||||
tty.debug("job package: {0}".format(job_pkg))
|
||||
except AssertionError:
|
||||
msg = "Cannot copy stage logs: job spec ({0}) must be concrete"
|
||||
tty.error(msg.format(job_spec))
|
||||
return
|
||||
|
||||
stage_dir = job_pkg.stage.path
|
||||
tty.debug("stage dir: {0}".format(stage_dir))
|
||||
build_out_src = os.path.join(stage_dir, "spack-build-out.txt")
|
||||
copy_files_to_artifacts(build_out_src, job_log_dir)
|
||||
|
||||
|
||||
def copy_test_logs_to_artifacts(test_stage, job_test_dir):
|
||||
"""
|
||||
Copy test log file(s) to the given artifacts directory
|
||||
|
||||
Parameters:
|
||||
test_stage (str): test stage path
|
||||
job_test_dir (str): the destination artifacts test directory
|
||||
"""
|
||||
tty.debug("test stage: {0}".format(test_stage))
|
||||
if not os.path.exists(test_stage):
|
||||
msg = "Cannot copy test logs: job test stage ({0}) does not exist"
|
||||
tty.error(msg.format(test_stage))
|
||||
return
|
||||
|
||||
copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
|
||||
|
||||
|
||||
def download_and_extract_artifacts(url, work_dir):
|
||||
@@ -1985,3 +2014,392 @@ def reproduce_ci_job(url, work_dir):
|
||||
)
|
||||
|
||||
print("".join(inst_list))
|
||||
|
||||
|
||||
def process_command(cmd, cmd_args, repro_dir):
|
||||
"""
|
||||
Create a script for and run the command. Copy the script to the
|
||||
reproducibility directory.
|
||||
|
||||
Arguments:
|
||||
cmd (str): name of the command being processed
|
||||
cmd_args (list): string arguments to pass to the command
|
||||
repro_dir (str): Job reproducibility directory
|
||||
|
||||
Returns: the exit code from processing the command
|
||||
"""
|
||||
tty.debug("spack {0} arguments: {1}".format(cmd, cmd_args))
|
||||
|
||||
# Write the command to a shell script
|
||||
script = "{0}.sh".format(cmd)
|
||||
with open(script, "w") as fd:
|
||||
fd.write("#!/bin/bash\n\n")
|
||||
fd.write("\n# spack {0} command\n".format(cmd))
|
||||
fd.write(" ".join(['"{0}"'.format(i) for i in cmd_args]))
|
||||
fd.write("\n")
|
||||
|
||||
st = os.stat(script)
|
||||
os.chmod(script, st.st_mode | stat.S_IEXEC)
|
||||
|
||||
copy_path = os.path.join(repro_dir, script)
|
||||
shutil.copyfile(script, copy_path)
|
||||
|
||||
# Run the generated install.sh shell script as if it were being run in
|
||||
# a login shell.
|
||||
try:
|
||||
cmd_process = subprocess.Popen(["bash", "./{0}".format(script)])
|
||||
cmd_process.wait()
|
||||
exit_code = cmd_process.returncode
|
||||
except (ValueError, subprocess.CalledProcessError, OSError) as err:
|
||||
tty.error("Encountered error running {0} script".format(cmd))
|
||||
tty.error(err)
|
||||
exit_code = 1
|
||||
|
||||
tty.debug("spack {0} exited {1}".format(cmd, exit_code))
|
||||
return exit_code
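For example, ci_rebuild (further below) drives the install step through this helper; a simplified sketch of that call, with an abbreviated argument list:

# install_args is built up elsewhere; this writes install.sh, copies it into
# repro_dir, runs it with bash, and returns the script's exit code.
install_args = ["spack", "-d", "-v", "install", "--keep-stage", "/{0}".format(job_spec.dag_hash())]
install_exit_code = process_command("install", install_args, repro_dir)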
|
||||
|
||||
|
||||
def create_buildcache(**kwargs):
|
||||
"""Create the buildcache at the provided mirror(s).
|
||||
|
||||
Arguments:
|
||||
kwargs (dict): dictionary of arguments used to create the buildcache
|
||||
|
||||
List of recognized keys:
|
||||
|
||||
* "env" (spack.environment.Environment): the active environment
|
||||
* "buildcache_mirror_url" (str or None): URL for the buildcache mirror
|
||||
* "pipeline_mirror_url" (str or None): URL for the pipeline mirror
|
||||
* "pr_pipeline" (bool): True if the CI job is for a PR
|
||||
* "json_path" (str): path the the spec's JSON file
|
||||
"""
|
||||
env = kwargs.get("env")
|
||||
buildcache_mirror_url = kwargs.get("buildcache_mirror_url")
|
||||
pipeline_mirror_url = kwargs.get("pipeline_mirror_url")
|
||||
pr_pipeline = kwargs.get("pr_pipeline")
|
||||
json_path = kwargs.get("json_path")
|
||||
|
||||
sign_binaries = pr_pipeline is False and can_sign_binaries()
|
||||
|
||||
# Create buildcache in either the main remote mirror, or in the
|
||||
# per-PR mirror, if this is a PR pipeline
|
||||
if buildcache_mirror_url:
|
||||
push_mirror_contents(env, json_path, buildcache_mirror_url, sign_binaries)
|
||||
|
||||
# Create another copy of that buildcache in the per-pipeline
|
||||
# temporary storage mirror (this is only done if either
|
||||
# artifacts buildcache is enabled or a temporary storage url
|
||||
# prefix is set)
|
||||
if pipeline_mirror_url:
|
||||
push_mirror_contents(env, json_path, pipeline_mirror_url, sign_binaries)
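A usage sketch with hypothetical values for the recognized keys:

create_buildcache(
    env=env,
    buildcache_mirror_url="s3://spack-binaries/develop",  # hypothetical
    pipeline_mirror_url=None,
    pr_pipeline=False,
    json_path="/tmp/concrete_specs/zlib.spec.json",       # hypothetical
)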
|
||||
|
||||
|
||||
def write_broken_spec(url, pkg_name, stack_name, job_url, pipeline_url, spec_dict):
|
||||
"""Given a url to write to and the details of the failed job, write an entry
|
||||
in the broken specs list.
|
||||
"""
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
file_path = os.path.join(tmpdir, "broken.txt")
|
||||
|
||||
broken_spec_details = {
|
||||
"broken-spec": {
|
||||
"job-name": pkg_name,
|
||||
"job-stack": stack_name,
|
||||
"job-url": job_url,
|
||||
"pipeline-url": pipeline_url,
|
||||
"concrete-spec-dict": spec_dict,
|
||||
}
|
||||
}
|
||||
|
||||
try:
|
||||
with open(file_path, "w") as fd:
|
||||
fd.write(syaml.dump(broken_spec_details))
|
||||
web_util.push_to_url(
|
||||
file_path,
|
||||
url,
|
||||
keep_original=False,
|
||||
extra_args={"ContentType": "text/plain"},
|
||||
)
|
||||
except Exception as err:
|
||||
# If there is an S3 error (e.g., access denied or connection
|
||||
# error), the first non boto-specific class in the exception
|
||||
# hierarchy is Exception. Just print a warning and return
|
||||
msg = "Error writing to broken specs list {0}: {1}".format(url, err)
|
||||
tty.warn(msg)
|
||||
finally:
|
||||
shutil.rmtree(tmpdir)
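A sketch mirroring how ci_rebuild records a failed develop build; the stack name and environment lookups are illustrative, and ht is assumed to be the usual alias for spack.hash_types:

write_broken_spec(
    url_util.join(broken_specs_url, job_spec.dag_hash()),
    job_spec.name,
    "e4s",                                  # hypothetical stack name
    os.environ.get("CI_JOB_URL"),
    os.environ.get("CI_PIPELINE_URL"),
    job_spec.to_dict(hash=ht.dag_hash),
)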
|
||||
|
||||
|
||||
def read_broken_spec(broken_spec_url):
|
||||
"""Read data from broken specs file located at the url, return as a yaml
|
||||
object.
|
||||
"""
|
||||
try:
|
||||
_, _, fs = web_util.read_from_url(broken_spec_url)
|
||||
except (URLError, web_util.SpackWebError, HTTPError):
|
||||
tty.warn("Unable to read broken spec from {0}".format(broken_spec_url))
|
||||
return None
|
||||
|
||||
broken_spec_contents = codecs.getreader("utf-8")(fs).read()
|
||||
return syaml.load(broken_spec_contents)
|
||||
|
||||
|
||||
def display_broken_spec_messages(base_url, hashes):
|
||||
"""Fetch the broken spec file for each of the hashes under the base_url and
|
||||
print a message with some details about each one.
|
||||
"""
|
||||
broken_specs = [(h, read_broken_spec(url_util.join(base_url, h))) for h in hashes]
|
||||
for spec_hash, broken_spec in [tup for tup in broken_specs if tup[1]]:
|
||||
details = broken_spec["broken-spec"]
|
||||
if "job-name" in details:
|
||||
item_name = "{0}/{1}".format(details["job-name"], spec_hash[:7])
|
||||
else:
|
||||
item_name = spec_hash
|
||||
|
||||
if "job-stack" in details:
|
||||
item_name = "{0} (in stack {1})".format(item_name, details["job-stack"])
|
||||
|
||||
msg = " {0} was reported broken here: {1}".format(item_name, details["job-url"])
|
||||
tty.msg(msg)
|
||||
|
||||
|
||||
def run_standalone_tests(**kwargs):
|
||||
"""Run stand-alone tests on the current spec.
|
||||
|
||||
Arguments:
|
||||
kwargs (dict): dictionary of arguments used to run the tests
|
||||
|
||||
List of recognized keys:
|
||||
|
||||
* "cdash" (CDashHandler): (optional) cdash handler instance
|
||||
* "fail_fast" (bool): (optional) terminate tests after the first failure
|
||||
* "log_file" (str): (optional) test log file name if NOT CDash reporting
|
||||
* "job_spec" (Spec): spec that was built
|
||||
* "repro_dir" (str): reproduction directory
|
||||
"""
|
||||
cdash = kwargs.get("cdash")
|
||||
fail_fast = kwargs.get("fail_fast")
|
||||
log_file = kwargs.get("log_file")
|
||||
|
||||
if cdash and log_file:
|
||||
tty.msg("The test log file {0} option is ignored with CDash reporting".format(log_file))
|
||||
log_file = None
|
||||
|
||||
# Error out but do NOT terminate if there are missing required arguments.
|
||||
job_spec = kwargs.get("job_spec")
|
||||
if not job_spec:
|
||||
tty.error("Job spec is required to run stand-alone tests")
|
||||
return
|
||||
|
||||
repro_dir = kwargs.get("repro_dir")
|
||||
if not repro_dir:
|
||||
tty.error("Reproduction directory is required for stand-alone tests")
|
||||
return
|
||||
|
||||
test_args = [
|
||||
"spack",
|
||||
"-d",
|
||||
"-v",
|
||||
"test",
|
||||
"run",
|
||||
]
|
||||
if fail_fast:
|
||||
test_args.append("--fail-fast")
|
||||
|
||||
if cdash:
|
||||
test_args.extend(cdash.args())
|
||||
else:
|
||||
test_args.extend(["--log-format", "junit"])
|
||||
if log_file:
|
||||
test_args.extend(["--log-file", log_file])
|
||||
test_args.append(job_spec.name)
|
||||
|
||||
tty.debug("Running {0} stand-alone tests".format(job_spec.name))
|
||||
exit_code = process_command("test", test_args, repro_dir)
|
||||
|
||||
tty.debug("spack test exited {0}".format(exit_code))
|
||||
|
||||
|
||||
class CDashHandler(object):
|
||||
"""
|
||||
Class for managing CDash data and processing.
|
||||
"""
|
||||
|
||||
def __init__(self, ci_cdash):
|
||||
# start with the gitlab ci configuration
|
||||
self.url = ci_cdash.get("url")
|
||||
self.build_group = ci_cdash.get("build-group")
|
||||
self.project = ci_cdash.get("project")
|
||||
self.site = ci_cdash.get("site")
|
||||
|
||||
# grab the authorization token when available
|
||||
self.auth_token = os.environ.get("SPACK_CDASH_AUTH_TOKEN")
|
||||
if self.auth_token:
|
||||
tty.verbose("Using CDash auth token from environment")
|
||||
|
||||
# append runner description to the site if available
|
||||
runner = os.environ.get("CI_RUNNER_DESCRIPTION")
|
||||
if runner:
|
||||
self.site += " ({0})".format(runner)
|
||||
|
||||
# track current spec, if any
|
||||
self.current_spec = None
|
||||
|
||||
def args(self):
|
||||
return [
|
||||
"--cdash-upload-url",
|
||||
self.upload_url,
|
||||
"--cdash-build",
|
||||
self.build_name,
|
||||
"--cdash-site",
|
||||
self.site,
|
||||
"--cdash-buildstamp",
|
||||
self.build_stamp,
|
||||
]
|
||||
|
||||
@property # type: ignore
|
||||
def build_name(self):
|
||||
"""Returns the CDash build name.
|
||||
|
||||
A name will be generated if the `current_spec` property is set;
|
||||
otherwise, the value will be retrieved from the environment
|
||||
through the `SPACK_CDASH_BUILD_NAME` variable.
|
||||
|
||||
Returns: (str) current spec's CDash build name."""
|
||||
spec = self.current_spec
|
||||
if spec:
|
||||
build_name = "{0}@{1}%{2} hash={3} arch={4} ({5})".format(
|
||||
spec.name,
|
||||
spec.version,
|
||||
spec.compiler,
|
||||
spec.dag_hash(),
|
||||
spec.architecture,
|
||||
self.build_group,
|
||||
)
|
||||
tty.verbose(
|
||||
"Generated CDash build name ({0}) from the {1}".format(build_name, spec.name)
|
||||
)
|
||||
return build_name
|
||||
|
||||
build_name = os.environ.get("SPACK_CDASH_BUILD_NAME")
|
||||
tty.verbose("Using CDash build name ({0}) from the environment".format(build_name))
|
||||
return build_name
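With a concrete spec, the generated name follows the format string above, for example (all values hypothetical):

# "zlib@1.2.13%gcc@11.2.0 hash=abcdef1234567890abcdef1234567890 arch=linux-ubuntu20.04-x86_64 (Experimental)"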
|
||||
|
||||
@property # type: ignore
|
||||
def build_stamp(self):
|
||||
"""Returns the CDash build stamp.
|
||||
|
||||
The one defined by SPACK_CDASH_BUILD_STAMP environment variable
|
||||
is preferred due to the representation of timestamps; otherwise,
|
||||
one will be built.
|
||||
|
||||
Returns: (str) current CDash build stamp"""
|
||||
build_stamp = os.environ.get("SPACK_CDASH_BUILD_STAMP")
|
||||
if build_stamp:
|
||||
tty.verbose("Using build stamp ({0}) from the environment".format(build_stamp))
|
||||
return build_stamp
|
||||
|
||||
build_stamp = cdash_build_stamp(self.build_group, time.time())
|
||||
tty.verbose("Generated new build stamp ({0})".format(build_stamp))
|
||||
return build_stamp
|
||||
|
||||
@property # type: ignore
|
||||
@memoized
|
||||
def project_enc(self):
|
||||
tty.debug("Encoding project ({0}): {1})".format(type(self.project), self.project))
|
||||
encode = urlencode({"project": self.project})
|
||||
index = encode.find("=") + 1
|
||||
return encode[index:]
|
||||
|
||||
@property
|
||||
def upload_url(self):
|
||||
url_format = "{0}/submit.php?project={1}"
|
||||
return url_format.format(self.url, self.project_enc)
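Combined with the URL-encoded project name, the upload URL takes a shape like the following (host and project hypothetical):

# https://cdash.example.com/submit.php?project=Spack%20Testing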
|
||||
|
||||
def copy_test_results(self, source, dest):
|
||||
"""Copy test results to artifacts directory."""
|
||||
reports = fs.join_path(source, "*_Test*.xml")
|
||||
copy_files_to_artifacts(reports, dest)
|
||||
|
||||
def create_buildgroup(self, opener, headers, url, group_name, group_type):
|
||||
data = {"newbuildgroup": group_name, "project": self.project, "type": group_type}
|
||||
|
||||
enc_data = json.dumps(data).encode("utf-8")
|
||||
|
||||
request = Request(url, data=enc_data, headers=headers)
|
||||
|
||||
response = opener.open(request)
|
||||
response_code = response.getcode()
|
||||
|
||||
if response_code not in [200, 201]:
|
||||
msg = "Creating buildgroup failed (response code = {0})".format(response_code)
|
||||
tty.warn(msg)
|
||||
return None
|
||||
|
||||
response_text = response.read()
|
||||
response_json = json.loads(response_text)
|
||||
build_group_id = response_json["id"]
|
||||
|
||||
return build_group_id
|
||||
|
||||
def populate_buildgroup(self, job_names):
|
||||
url = "{0}/api/v1/buildgroup.php".format(self.url)
|
||||
|
||||
headers = {
|
||||
"Authorization": "Bearer {0}".format(self.auth_token),
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
|
||||
opener = build_opener(HTTPHandler)
|
||||
|
||||
parent_group_id = self.create_buildgroup(
|
||||
opener,
|
||||
headers,
|
||||
url,
|
||||
self.build_group,
|
||||
"Daily",
|
||||
)
|
||||
group_id = self.create_buildgroup(
|
||||
opener,
|
||||
headers,
|
||||
url,
|
||||
"Latest {0}".format(self.build_group),
|
||||
"Latest",
|
||||
)
|
||||
|
||||
if not parent_group_id or not group_id:
|
||||
msg = "Failed to create or retrieve buildgroups for {0}".format(self.build_group)
|
||||
tty.warn(msg)
|
||||
return
|
||||
|
||||
data = {
|
||||
"dynamiclist": [
|
||||
{
|
||||
"match": name,
|
||||
"parentgroupid": parent_group_id,
|
||||
"site": self.site,
|
||||
}
|
||||
for name in job_names
|
||||
],
|
||||
}
|
||||
|
||||
enc_data = json.dumps(data).encode("utf-8")
|
||||
|
||||
request = Request(url, data=enc_data, headers=headers)
|
||||
request.get_method = lambda: "PUT"
|
||||
|
||||
response = opener.open(request)
|
||||
response_code = response.getcode()
|
||||
|
||||
if response_code != 200:
|
||||
msg = "Error response code ({0}) in populate_buildgroup".format(response_code)
|
||||
tty.warn(msg)
|
||||
|
||||
def report_skipped(self, spec, directory_name, reason):
|
||||
cli_args = self.args()
|
||||
cli_args.extend(["package", [spec.name]])
|
||||
it = iter(cli_args)
|
||||
kv = {x.replace("--", "").replace("-", "_"): next(it) for x in it}
|
||||
|
||||
reporter = CDash(Bunch(**kv))
|
||||
reporter.test_skipped_report(directory_name, spec, reason)
|
||||
|
||||
@@ -291,19 +291,24 @@ def disambiguate_spec_from_hashes(spec, hashes, local=False, installed=True, fir
|
||||
elif first:
|
||||
return matching_specs[0]
|
||||
|
||||
elif len(matching_specs) > 1:
|
||||
format_string = "{name}{@version}{%compiler}{arch=architecture}"
|
||||
args = ["%s matches multiple packages." % spec, "Matching packages:"]
|
||||
args += [
|
||||
colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string)
|
||||
for s in matching_specs
|
||||
]
|
||||
args += ["Use a more specific spec."]
|
||||
tty.die(*args)
|
||||
ensure_single_spec_or_die(spec, matching_specs)
|
||||
|
||||
return matching_specs[0]
|
||||
|
||||
|
||||
def ensure_single_spec_or_die(spec, matching_specs):
|
||||
if len(matching_specs) <= 1:
|
||||
return
|
||||
|
||||
format_string = "{name}{@version}{%compiler}{arch=architecture}"
|
||||
args = ["%s matches multiple packages." % spec, "Matching packages:"]
|
||||
args += [
|
||||
colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
|
||||
]
|
||||
args += ["Use a more specific spec (e.g., prepend '/' to the hash)."]
|
||||
tty.die(*args)
|
||||
|
||||
|
||||
def gray_hash(spec, length):
|
||||
if not length:
|
||||
# default to maximum hash length
|
||||
@@ -640,3 +645,8 @@ def find_environment(args):
|
||||
return ev.Environment(env)
|
||||
|
||||
raise ev.SpackEnvironmentError("no environment in %s" % env)
|
||||
|
||||
|
||||
def first_line(docstring):
|
||||
"""Return the first line of the docstring."""
|
||||
return docstring.split("\n")[0]
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import glob
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
@@ -15,7 +17,6 @@
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.fetch_strategy as fs
|
||||
import spack.hash_types as ht
|
||||
import spack.mirror
|
||||
import spack.relocate
|
||||
@@ -272,7 +273,12 @@ def setup_parser(subparser):
|
||||
|
||||
# Sync buildcache entries from one mirror to another
|
||||
sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
|
||||
source = sync.add_mutually_exclusive_group(required=True)
|
||||
sync.add_argument(
|
||||
"--manifest-glob",
|
||||
default=None,
|
||||
help="A quoted glob pattern identifying copy manifest files",
|
||||
)
|
||||
source = sync.add_mutually_exclusive_group(required=False)
|
||||
source.add_argument(
|
||||
"--src-directory", metavar="DIRECTORY", type=str, help="Source mirror as a local file path"
|
||||
)
|
||||
@@ -282,7 +288,7 @@ def setup_parser(subparser):
|
||||
source.add_argument(
|
||||
"--src-mirror-url", metavar="MIRROR_URL", type=str, help="URL of the source mirror"
|
||||
)
|
||||
dest = sync.add_mutually_exclusive_group(required=True)
|
||||
dest = sync.add_mutually_exclusive_group(required=False)
|
||||
dest.add_argument(
|
||||
"--dest-directory",
|
||||
metavar="DIRECTORY",
|
||||
@@ -615,6 +621,31 @@ def copy_fn(args):
|
||||
shutil.copyfile(specfile_src_path_yaml, specfile_dest_path_yaml)
|
||||
|
||||
|
||||
def copy_buildcache_file(src_url, dest_url, local_path=None):
|
||||
"""Copy from source url to destination url"""
|
||||
tmpdir = None
|
||||
|
||||
if not local_path:
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
local_path = os.path.join(tmpdir, os.path.basename(src_url))
|
||||
|
||||
try:
|
||||
temp_stage = Stage(src_url, path=os.path.dirname(local_path))
|
||||
try:
|
||||
temp_stage.create()
|
||||
temp_stage.fetch()
|
||||
web_util.push_to_url(local_path, dest_url, keep_original=True)
|
||||
except web_util.FetchError as e:
|
||||
# Expected, since we have to try all the possible extensions
|
||||
tty.debug("no such file: {0}".format(src_url))
|
||||
tty.debug(e)
|
||||
finally:
|
||||
temp_stage.destroy()
|
||||
finally:
|
||||
if tmpdir and os.path.exists(tmpdir):
|
||||
shutil.rmtree(tmpdir)
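A usage sketch with hypothetical buildcache URLs; a missing source file is expected (other extensions are tried elsewhere) and is only logged at debug level:

copy_buildcache_file(
    "s3://source-mirror/build_cache/linux-ubuntu20.04-x86_64-gcc-11.2.0-zlib-1.2.13-abcdef1.spack",
    "s3://dest-mirror/build_cache/linux-ubuntu20.04-x86_64-gcc-11.2.0-zlib-1.2.13-abcdef1.spack",
)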
|
||||
|
||||
|
||||
def sync_fn(args):
|
||||
"""Syncs binaries (and associated metadata) from one mirror to another.
|
||||
Requires an active environment in order to know which specs to sync.
|
||||
@@ -623,6 +654,10 @@ def sync_fn(args):
|
||||
src (str): Source mirror URL
|
||||
dest (str): Destination mirror URL
|
||||
"""
|
||||
if args.manifest_glob:
|
||||
manifest_copy(glob.glob(args.manifest_glob))
|
||||
return 0
|
||||
|
||||
# Figure out the source mirror
|
||||
source_location = None
|
||||
if args.src_directory:
|
||||
@@ -688,8 +723,9 @@ def sync_fn(args):
|
||||
buildcache_rel_paths.extend(
|
||||
[
|
||||
os.path.join(build_cache_dir, bindist.tarball_path_name(s, ".spack")),
|
||||
os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.yaml")),
|
||||
os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.json.sig")),
|
||||
os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.json")),
|
||||
os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.yaml")),
|
||||
]
|
||||
)
|
||||
|
||||
@@ -702,24 +738,31 @@ def sync_fn(args):
|
||||
dest_url = url_util.join(dest_mirror_url, rel_path)
|
||||
|
||||
tty.debug("Copying {0} to {1} via {2}".format(src_url, dest_url, local_path))
|
||||
|
||||
stage = Stage(
|
||||
src_url, name="temporary_file", path=os.path.dirname(local_path), keep=True
|
||||
)
|
||||
|
||||
try:
|
||||
stage.create()
|
||||
stage.fetch()
|
||||
web_util.push_to_url(local_path, dest_url, keep_original=True)
|
||||
except fs.FetchError as e:
|
||||
tty.debug("spack buildcache unable to sync {0}".format(rel_path))
|
||||
tty.debug(e)
|
||||
finally:
|
||||
stage.destroy()
|
||||
copy_buildcache_file(src_url, dest_url, local_path=local_path)
|
||||
finally:
|
||||
shutil.rmtree(tmpdir)
|
||||
|
||||
|
||||
def manifest_copy(manifest_file_list):
    """Read manifest files containing information about specific specs to copy
    from source to destination, remove duplicates since any binary package for
    a given hash should be the same as any other, and copy all files specified
    in the manifest files."""
    deduped_manifest = {}

    for manifest_path in manifest_file_list:
        with open(manifest_path) as fd:
            manifest = json.loads(fd.read())
            for spec_hash, copy_list in manifest.items():
                # Last duplicate hash wins
                deduped_manifest[spec_hash] = copy_list

    for spec_hash, copy_list in deduped_manifest.items():
        for copy_file in copy_list:
            tty.debug("copying {0} to {1}".format(copy_file["src"], copy_file["dest"]))
            copy_buildcache_file(copy_file["src"], copy_file["dest"])
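Each manifest read here is a JSON object keyed by spec hash, whose values list the individual src/dest copies to perform. A hypothetical manifest, shown as the Python structure json.loads returns:

manifest = {
    "abcdef1234567890abcdef1234567890": [
        {
            "src": "s3://src-mirror/build_cache/...-zlib-1.2.13-abcdef1.spack",
            "dest": "s3://dest-mirror/build_cache/...-zlib-1.2.13-abcdef1.spack",
        },
        {
            "src": "s3://src-mirror/build_cache/...-zlib-1.2.13-abcdef1.spec.json.sig",
            "dest": "s3://dest-mirror/build_cache/...-zlib-1.2.13-abcdef1.spec.json.sig",
        },
    ],
}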
|
||||
|
||||
|
||||
def update_index(mirror_url, update_keys=False):
|
||||
mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
|
||||
outdir = url_util.format(mirror.push_url)
|
||||
|
||||
lib/spack/spack/cmd/change.py (new file, 51 lines)
@@ -0,0 +1,51 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
|
||||
description = "change an existing spec in an environment"
|
||||
section = "environments"
|
||||
level = "long"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
"-l",
|
||||
"--list-name",
|
||||
dest="list_name",
|
||||
default="specs",
|
||||
help="name of the list to remove specs from",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--match-spec",
|
||||
dest="match_spec",
|
||||
help="if name is ambiguous, supply a spec to match",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-a",
|
||||
"--all",
|
||||
action="store_true",
|
||||
help="change all matching specs (allow changing more than one spec)",
|
||||
)
|
||||
arguments.add_common_arguments(subparser, ["specs"])
|
||||
|
||||
|
||||
def change(parser, args):
|
||||
env = spack.cmd.require_active_env(cmd_name="change")
|
||||
|
||||
with env.write_transaction():
|
||||
if args.match_spec:
|
||||
match_spec = spack.spec.Spec(args.match_spec)
|
||||
else:
|
||||
match_spec = None
|
||||
for spec in spack.cmd.parse_specs(args.specs):
|
||||
env.change_existing_spec(
|
||||
spec,
|
||||
list_name=args.list_name,
|
||||
match_spec=match_spec,
|
||||
allow_changing_multiple_specs=args.all,
|
||||
)
|
||||
env.write()
|
||||
@@ -6,13 +6,9 @@
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import stat
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
from six.moves.urllib.parse import urlencode
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.binary_distribution as bindist
|
||||
@@ -22,7 +18,6 @@
|
||||
import spack.environment as ev
|
||||
import spack.hash_types as ht
|
||||
import spack.mirror
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.util.url as url_util
|
||||
import spack.util.web as web_util
|
||||
|
||||
@@ -34,6 +29,10 @@
|
||||
INSTALL_FAIL_CODE = 1
|
||||
|
||||
|
||||
def deindent(desc):
|
||||
return desc.replace("    ", "")  # strip the four-space docstring indentation
|
||||
|
||||
|
||||
def get_env_var(variable_name):
|
||||
if variable_name in os.environ:
|
||||
return os.environ.get(variable_name)
|
||||
@@ -45,27 +44,35 @@ def setup_parser(subparser):
|
||||
subparsers = subparser.add_subparsers(help="CI sub-commands")
|
||||
|
||||
# Dynamic generation of the jobs yaml from a spack environment
|
||||
generate = subparsers.add_parser("generate", help=ci_generate.__doc__)
|
||||
generate = subparsers.add_parser(
|
||||
"generate",
|
||||
description=deindent(ci_generate.__doc__),
|
||||
help=spack.cmd.first_line(ci_generate.__doc__),
|
||||
)
|
||||
generate.add_argument(
|
||||
"--output-file",
|
||||
default=None,
|
||||
help="Path to file where generated jobs file should be "
|
||||
+ "written. The default is .gitlab-ci.yml in the root of the "
|
||||
+ "repository.",
|
||||
help="""pathname for the generated gitlab ci yaml file
|
||||
Path to the file where generated jobs file should
|
||||
be written. Default is .gitlab-ci.yml in the root of
|
||||
the repository.""",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--copy-to",
|
||||
default=None,
|
||||
help="Absolute path of additional location where generated jobs "
|
||||
+ "yaml file should be copied. Default is not to copy.",
|
||||
help="""path to additional directory for job files
|
||||
This option provides an absolute path to a directory
|
||||
where the generated jobs yaml file should be copied.
|
||||
Default is not to copy.""",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--optimize",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="(Experimental) run the generated document through a series of "
|
||||
"optimization passes designed to reduce the size of the "
|
||||
"generated file.",
|
||||
help="""(Experimental) optimize the gitlab yaml file for size
|
||||
Run the generated document through a series of
|
||||
optimization passes designed to reduce the size
|
||||
of the generated file.""",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--dependencies",
|
||||
@@ -86,53 +93,84 @@ def setup_parser(subparser):
|
||||
action="store_true",
|
||||
dest="prune_dag",
|
||||
default=True,
|
||||
help="""Do not generate jobs for specs already up to
|
||||
date on the mirror""",
|
||||
help="""skip up-to-date specs
|
||||
Do not generate jobs for specs that are up-to-date
|
||||
on the mirror.""",
|
||||
)
|
||||
prune_group.add_argument(
|
||||
"--no-prune-dag",
|
||||
action="store_false",
|
||||
dest="prune_dag",
|
||||
default=True,
|
||||
help="""Generate jobs for specs already up to date
|
||||
on the mirror""",
|
||||
help="""process up-to-date specs
|
||||
Generate jobs for specs even when they are up-to-date
|
||||
on the mirror.""",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--check-index-only",
|
||||
action="store_true",
|
||||
dest="index_only",
|
||||
default=False,
|
||||
help="""Spack always check specs against configured
|
||||
binary mirrors when generating the pipeline, regardless of whether or not
|
||||
DAG pruning is enabled. This flag controls whether it might attempt to
|
||||
fetch remote spec files directly (ensuring no spec is rebuilt if it
|
||||
is present on the mirror), or whether it should reduce pipeline generation time
|
||||
by assuming all remote buildcache indices are up to date and only use those
|
||||
to determine whether a given spec is up to date on mirrors. In the latter
|
||||
case, specs might be needlessly rebuilt if remote buildcache indices are out
|
||||
of date.""",
|
||||
help="""only check spec state from buildcache indices
|
||||
Spack always checks specs against configured binary
|
||||
mirrors, regardless of the DAG pruning option.
|
||||
If enabled, Spack will assume all remote buildcache
|
||||
indices are up-to-date when assessing whether the spec
|
||||
on the mirror, if present, is up-to-date. This has the
|
||||
benefit of reducing pipeline generation time but at the
|
||||
potential cost of needlessly rebuilding specs when the
|
||||
indices are outdated.
|
||||
If not enabled, Spack will fetch remote spec files
|
||||
directly to assess whether the spec on the mirror is
|
||||
up-to-date.""",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--artifacts-root",
|
||||
default=None,
|
||||
help="""Path to root of artifacts directory. If provided, concrete
|
||||
environment files (spack.yaml, spack.lock) will be generated under this
|
||||
path and their location sent to generated child jobs via the custom job
|
||||
variable SPACK_CONCRETE_ENVIRONMENT_PATH.""",
|
||||
help="""path to the root of the artifacts directory
|
||||
If provided, concrete environment files (spack.yaml,
|
||||
spack.lock) will be generated under this directory.
|
||||
Their location will be passed to generated child jobs
|
||||
through the SPACK_CONCRETE_ENVIRONMENT_PATH variable.""",
|
||||
)
|
||||
generate.set_defaults(func=ci_generate)
|
||||
|
||||
# Rebuild the buildcache index associated with the mirror in the
|
||||
# active, gitlab-enabled environment.
|
||||
index = subparsers.add_parser("rebuild-index", help=ci_reindex.__doc__)
|
||||
index = subparsers.add_parser(
|
||||
"rebuild-index",
|
||||
description=deindent(ci_reindex.__doc__),
|
||||
help=spack.cmd.first_line(ci_reindex.__doc__),
|
||||
)
|
||||
index.set_defaults(func=ci_reindex)
|
||||
|
||||
# Handle steps of a ci build/rebuild
|
||||
rebuild = subparsers.add_parser("rebuild", help=ci_rebuild.__doc__)
|
||||
rebuild = subparsers.add_parser(
|
||||
"rebuild",
|
||||
description=deindent(ci_rebuild.__doc__),
|
||||
help=spack.cmd.first_line(ci_rebuild.__doc__),
|
||||
)
|
||||
rebuild.add_argument(
|
||||
"-t",
|
||||
"--tests",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="""run stand-alone tests after the build""",
|
||||
)
|
||||
rebuild.add_argument(
|
||||
"--fail-fast",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="""stop stand-alone tests after the first failure""",
|
||||
)
|
||||
rebuild.set_defaults(func=ci_rebuild)
|
||||
|
||||
# Facilitate reproduction of a failed CI build job
|
||||
reproduce = subparsers.add_parser("reproduce-build", help=ci_reproduce.__doc__)
|
||||
reproduce = subparsers.add_parser(
|
||||
"reproduce-build",
|
||||
description=deindent(ci_reproduce.__doc__),
|
||||
help=spack.cmd.first_line(ci_reproduce.__doc__),
|
||||
)
|
||||
reproduce.add_argument("job_url", help="Url of job artifacts bundle")
|
||||
reproduce.add_argument(
|
||||
"--working-dir",
|
||||
@@ -144,12 +182,12 @@ def setup_parser(subparser):
|
||||
|
||||
|
||||
def ci_generate(args):
|
||||
"""Generate jobs file from a spack environment file containing CI info.
|
||||
Before invoking this command, you can set the environment variable
|
||||
SPACK_CDASH_AUTH_TOKEN to contain the CDash authorization token
|
||||
for creating a build group for the generated workload and registering
|
||||
all generated jobs under that build group. If this environment
|
||||
variable is not set, no build group will be created on CDash."""
|
||||
"""Generate jobs file from a CI-aware spack file.
|
||||
|
||||
If you want to report the results on CDash, you will need to set
|
||||
the SPACK_CDASH_AUTH_TOKEN before invoking this command. The
|
||||
value must be the CDash authorization token needed to create a
|
||||
build group and register all generated jobs under it."""
|
||||
env = spack.cmd.require_active_env(cmd_name="ci generate")
|
||||
|
||||
output_file = args.output_file
|
||||
@@ -190,8 +228,10 @@ def ci_generate(args):
|
||||
|
||||
|
||||
def ci_reindex(args):
|
||||
"""Rebuild the buildcache index associated with the mirror in the
|
||||
active, gitlab-enabled environment."""
|
||||
"""Rebuild the buildcache index for the remote mirror.
|
||||
|
||||
Use the active, gitlab-enabled environment to rebuild the buildcache
|
||||
index for the associated mirror."""
|
||||
env = spack.cmd.require_active_env(cmd_name="ci rebuild-index")
|
||||
yaml_root = ev.config_dict(env.yaml)
|
||||
|
||||
@@ -206,17 +246,16 @@ def ci_reindex(args):
|
||||
|
||||
|
||||
def ci_rebuild(args):
|
||||
"""Check a single spec against the remote mirror, and rebuild it from
|
||||
"""Rebuild a spec if it is not on the remote mirror.
|
||||
|
||||
Check a single spec against the remote mirror, and rebuild it from
|
||||
source if the mirror does not contain the hash."""
|
||||
env = spack.cmd.require_active_env(cmd_name="ci rebuild")
|
||||
|
||||
# Make sure the environment is "gitlab-enabled", or else there's nothing
|
||||
# to do.
|
||||
yaml_root = ev.config_dict(env.yaml)
|
||||
gitlab_ci = None
|
||||
if "gitlab-ci" in yaml_root:
|
||||
gitlab_ci = yaml_root["gitlab-ci"]
|
||||
|
||||
gitlab_ci = yaml_root["gitlab-ci"] if "gitlab-ci" in yaml_root else None
|
||||
if not gitlab_ci:
|
||||
tty.die("spack ci rebuild requires an env containing gitlab-ci cfg")
|
||||
|
||||
@@ -231,6 +270,7 @@ def ci_rebuild(args):
|
||||
# out as variables, or else provided by GitLab itself.
|
||||
pipeline_artifacts_dir = get_env_var("SPACK_ARTIFACTS_ROOT")
|
||||
job_log_dir = get_env_var("SPACK_JOB_LOG_DIR")
|
||||
job_test_dir = get_env_var("SPACK_JOB_TEST_DIR")
|
||||
repro_dir = get_env_var("SPACK_JOB_REPRO_DIR")
|
||||
local_mirror_dir = get_env_var("SPACK_LOCAL_MIRROR_DIR")
|
||||
concrete_env_dir = get_env_var("SPACK_CONCRETE_ENV_DIR")
|
||||
@@ -240,15 +280,17 @@ def ci_rebuild(args):
|
||||
root_spec = get_env_var("SPACK_ROOT_SPEC")
|
||||
job_spec_pkg_name = get_env_var("SPACK_JOB_SPEC_PKG_NAME")
|
||||
compiler_action = get_env_var("SPACK_COMPILER_ACTION")
|
||||
cdash_build_name = get_env_var("SPACK_CDASH_BUILD_NAME")
|
||||
spack_pipeline_type = get_env_var("SPACK_PIPELINE_TYPE")
|
||||
remote_mirror_override = get_env_var("SPACK_REMOTE_MIRROR_OVERRIDE")
|
||||
remote_mirror_url = get_env_var("SPACK_REMOTE_MIRROR_URL")
|
||||
spack_ci_stack_name = get_env_var("SPACK_CI_STACK_NAME")
|
||||
rebuild_everything = get_env_var("SPACK_REBUILD_EVERYTHING")
|
||||
|
||||
# Construct absolute paths relative to current $CI_PROJECT_DIR
|
||||
ci_project_dir = get_env_var("CI_PROJECT_DIR")
|
||||
pipeline_artifacts_dir = os.path.join(ci_project_dir, pipeline_artifacts_dir)
|
||||
job_log_dir = os.path.join(ci_project_dir, job_log_dir)
|
||||
job_test_dir = os.path.join(ci_project_dir, job_test_dir)
|
||||
repro_dir = os.path.join(ci_project_dir, repro_dir)
|
||||
local_mirror_dir = os.path.join(ci_project_dir, local_mirror_dir)
|
||||
concrete_env_dir = os.path.join(ci_project_dir, concrete_env_dir)
|
||||
@@ -263,23 +305,15 @@ def ci_rebuild(args):
|
||||
# Query the environment manifest to find out whether we're reporting to a
|
||||
# CDash instance, and if so, gather some information from the manifest to
|
||||
# support that task.
|
||||
enable_cdash = False
|
||||
if "cdash" in yaml_root:
|
||||
enable_cdash = True
|
||||
ci_cdash = yaml_root["cdash"]
|
||||
job_spec_buildgroup = ci_cdash["build-group"]
|
||||
cdash_base_url = ci_cdash["url"]
|
||||
cdash_project = ci_cdash["project"]
|
||||
proj_enc = urlencode({"project": cdash_project})
|
||||
eq_idx = proj_enc.find("=") + 1
|
||||
cdash_project_enc = proj_enc[eq_idx:]
|
||||
cdash_site = ci_cdash["site"]
|
||||
tty.debug("cdash_base_url = {0}".format(cdash_base_url))
|
||||
tty.debug("cdash_project = {0}".format(cdash_project))
|
||||
tty.debug("cdash_project_enc = {0}".format(cdash_project_enc))
|
||||
tty.debug("cdash_build_name = {0}".format(cdash_build_name))
|
||||
tty.debug("cdash_site = {0}".format(cdash_site))
|
||||
tty.debug("job_spec_buildgroup = {0}".format(job_spec_buildgroup))
|
||||
cdash_handler = spack_ci.CDashHandler(yaml_root.get("cdash")) if "cdash" in yaml_root else None
|
||||
if cdash_handler:
|
||||
tty.debug("cdash url = {0}".format(cdash_handler.url))
|
||||
tty.debug("cdash project = {0}".format(cdash_handler.project))
|
||||
tty.debug("cdash project_enc = {0}".format(cdash_handler.project_enc))
|
||||
tty.debug("cdash build_name = {0}".format(cdash_handler.build_name))
|
||||
tty.debug("cdash build_stamp = {0}".format(cdash_handler.build_stamp))
|
||||
tty.debug("cdash site = {0}".format(cdash_handler.site))
|
||||
tty.debug("cdash build_group = {0}".format(cdash_handler.build_group))
|
||||
|
||||
# Is this a pipeline run on a spack PR or a merge to develop? It might
|
||||
# be neither, e.g. a pipeline run on some environment repository.
|
||||
@@ -292,6 +326,8 @@ def ci_rebuild(args):
|
||||
)
|
||||
)
|
||||
|
||||
full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False
|
||||
|
||||
# If no override url exists, then just push binary package to the
|
||||
# normal remote mirror url.
|
||||
buildcache_mirror_url = remote_mirror_override or remote_mirror_url
|
||||
@@ -344,6 +380,9 @@ def ci_rebuild(args):
|
||||
if os.path.exists(job_log_dir):
|
||||
shutil.rmtree(job_log_dir)
|
||||
|
||||
if os.path.exists(job_test_dir):
|
||||
shutil.rmtree(job_test_dir)
|
||||
|
||||
if os.path.exists(repro_dir):
|
||||
shutil.rmtree(repro_dir)
|
||||
|
||||
@@ -351,6 +390,7 @@ def ci_rebuild(args):
|
||||
# need for storing artifacts. The cdash_report directory will be
|
||||
# created internally if needed.
|
||||
os.makedirs(job_log_dir)
|
||||
os.makedirs(job_test_dir)
|
||||
os.makedirs(repro_dir)
|
||||
|
||||
# Copy the concrete environment files to the repro directory so we can
|
||||
@@ -411,6 +451,8 @@ def ci_rebuild(args):
|
||||
fd.write(spack_info.encode("utf8"))
|
||||
fd.write(b"\n")
|
||||
|
||||
pipeline_mirrors = []
|
||||
|
||||
# If we decided there should be a temporary storage mechanism, add that
|
||||
# mirror now so it's used when we check for a hash match already
|
||||
# built for this spec.
|
||||
@@ -418,22 +460,29 @@ def ci_rebuild(args):
|
||||
spack.mirror.add(
|
||||
spack_ci.TEMP_STORAGE_MIRROR_NAME, pipeline_mirror_url, cfg.default_modify_scope()
|
||||
)
|
||||
pipeline_mirrors.append(pipeline_mirror_url)
|
||||
|
||||
# Check configured mirrors for a built spec with a matching hash
|
||||
mirrors_to_check = None
|
||||
if remote_mirror_override and spack_pipeline_type == "spack_protected_branch":
|
||||
# Passing "mirrors_to_check" below means we *only* look in the override
|
||||
# mirror to see if we should skip building, which is what we want.
|
||||
mirrors_to_check = {"override": remote_mirror_override}
|
||||
if remote_mirror_override:
|
||||
if spack_pipeline_type == "spack_protected_branch":
|
||||
# Passing "mirrors_to_check" below means we *only* look in the override
|
||||
# mirror to see if we should skip building, which is what we want.
|
||||
mirrors_to_check = {"override": remote_mirror_override}
|
||||
|
||||
# Adding this mirror to the list of configured mirrors means dependencies
|
||||
# could be installed from either the override mirror or any other configured
|
||||
# mirror (e.g. remote_mirror_url which is defined in the environment or
|
||||
# pipeline_mirror_url), which is also what we want.
|
||||
spack.mirror.add("mirror_override", remote_mirror_override, cfg.default_modify_scope())
|
||||
# Adding this mirror to the list of configured mirrors means dependencies
|
||||
# could be installed from either the override mirror or any other configured
|
||||
# mirror (e.g. remote_mirror_url which is defined in the environment or
|
||||
# pipeline_mirror_url), which is also what we want.
|
||||
spack.mirror.add("mirror_override", remote_mirror_override, cfg.default_modify_scope())
|
||||
pipeline_mirrors.append(remote_mirror_override)
|
||||
|
||||
matches = bindist.get_mirrors_for_spec(
|
||||
job_spec, mirrors_to_check=mirrors_to_check, index_only=False
|
||||
matches = (
|
||||
None
|
||||
if full_rebuild
|
||||
else bindist.get_mirrors_for_spec(
|
||||
job_spec, mirrors_to_check=mirrors_to_check, index_only=False
|
||||
)
|
||||
)
|
||||
|
||||
if matches:
|
||||
@@ -456,6 +505,13 @@ def ci_rebuild(args):
|
||||
# Now we are done and successful
|
||||
sys.exit(0)
|
||||
|
||||
# Before beginning the install, if this is a "rebuild everything" pipeline, we
|
||||
# only want to keep the mirror being used by the current pipeline as its binary
|
||||
# package destination. This ensures that when we rebuild everything, we only
|
||||
# consume binary dependencies built in this pipeline.
|
||||
if full_rebuild:
|
||||
spack_ci.remove_other_mirrors(pipeline_mirrors, cfg.default_modify_scope())
|
||||
|
||||
# No hash match anywhere means we need to rebuild spec
|
||||
|
||||
# Start with spack arguments
|
||||
@@ -468,7 +524,10 @@ def ci_rebuild(args):
|
||||
install_args.extend(
|
||||
[
|
||||
"install",
|
||||
"--show-log-on-error", # Print full log on fails
|
||||
"--keep-stage",
|
||||
"--use-buildcache",
|
||||
"dependencies:only,package:never",
|
||||
]
|
||||
)
|
||||
|
||||
@@ -477,22 +536,9 @@ def ci_rebuild(args):
|
||||
if not verify_binaries:
|
||||
install_args.append("--no-check-signature")
|
||||
|
||||
if enable_cdash:
|
||||
if cdash_handler:
|
||||
# Add additional arguments to `spack install` for CDash reporting.
|
||||
cdash_upload_url = "{0}/submit.php?project={1}".format(cdash_base_url, cdash_project_enc)
|
||||
|
||||
install_args.extend(
|
||||
[
|
||||
"--cdash-upload-url",
|
||||
cdash_upload_url,
|
||||
"--cdash-build",
|
||||
cdash_build_name,
|
||||
"--cdash-site",
|
||||
cdash_site,
|
||||
"--cdash-track",
|
||||
job_spec_buildgroup,
|
||||
]
|
||||
)
|
||||
install_args.extend(cdash_handler.args())
|
||||
|
||||
# A compiler action of 'FIND_ANY' means we are building a bootstrap
|
||||
# compiler or one of its deps.
|
||||
@@ -500,35 +546,11 @@ def ci_rebuild(args):
|
||||
if compiler_action != "FIND_ANY":
|
||||
install_args.append("--no-add")
|
||||
|
||||
# TODO: once we have the concrete spec registry, use the DAG hash
|
||||
# to identify the spec to install, rather than the concrete spec
|
||||
# json file.
|
||||
install_args.extend(["-f", job_spec_json_path])
|
||||
# Identify spec to install by hash
|
||||
install_args.append("/{0}".format(job_spec.dag_hash()))
|
||||
|
||||
tty.debug("Installing {0} from source".format(job_spec.name))
|
||||
tty.debug("spack install arguments: {0}".format(install_args))
|
||||
|
||||
# Write the install command to a shell script
|
||||
with open("install.sh", "w") as fd:
|
||||
fd.write("#!/bin/bash\n\n")
|
||||
fd.write("\n# spack install command\n")
|
||||
fd.write(" ".join(['"{0}"'.format(i) for i in install_args]))
|
||||
fd.write("\n")
|
||||
|
||||
st = os.stat("install.sh")
|
||||
os.chmod("install.sh", st.st_mode | stat.S_IEXEC)
|
||||
|
||||
install_copy_path = os.path.join(repro_dir, "install.sh")
|
||||
shutil.copyfile("install.sh", install_copy_path)
|
||||
|
||||
# Run the generated install.sh shell script
|
||||
try:
|
||||
install_process = subprocess.Popen(["bash", "./install.sh"])
|
||||
install_process.wait()
|
||||
install_exit_code = install_process.returncode
|
||||
except (ValueError, subprocess.CalledProcessError, OSError) as inst:
|
||||
tty.error("Encountered error running install script")
|
||||
tty.error(inst)
|
||||
install_exit_code = spack_ci.process_command("install", install_args, repro_dir)
|
||||
|
||||
# Now do the post-install tasks
|
||||
tty.debug("spack install exited {0}".format(install_exit_code))
|
||||
@@ -543,61 +565,92 @@ def ci_rebuild(args):
|
||||
dev_fail_hash = job_spec.dag_hash()
|
||||
broken_spec_path = url_util.join(broken_specs_url, dev_fail_hash)
|
||||
tty.msg("Reporting broken develop build as: {0}".format(broken_spec_path))
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
empty_file_path = os.path.join(tmpdir, "empty.txt")
|
||||
|
||||
broken_spec_details = {
|
||||
"broken-spec": {
|
||||
"job-url": get_env_var("CI_JOB_URL"),
|
||||
"pipeline-url": get_env_var("CI_PIPELINE_URL"),
|
||||
"concrete-spec-dict": job_spec.to_dict(hash=ht.dag_hash),
|
||||
}
|
||||
}
|
||||
|
||||
try:
|
||||
with open(empty_file_path, "w") as efd:
|
||||
efd.write(syaml.dump(broken_spec_details))
|
||||
web_util.push_to_url(
|
||||
empty_file_path,
|
||||
broken_spec_path,
|
||||
keep_original=False,
|
||||
extra_args={"ContentType": "text/plain"},
|
||||
)
|
||||
except Exception as err:
|
||||
# If we got some kind of S3 (access denied or other connection
|
||||
# error), the first non boto-specific class in the exception
|
||||
# hierarchy is Exception. Just print a warning and return
|
||||
msg = "Error writing to broken specs list {0}: {1}".format(broken_spec_path, err)
|
||||
tty.warn(msg)
|
||||
finally:
|
||||
shutil.rmtree(tmpdir)
|
||||
spack_ci.write_broken_spec(
|
||||
broken_spec_path,
|
||||
job_spec_pkg_name,
|
||||
spack_ci_stack_name,
|
||||
get_env_var("CI_JOB_URL"),
|
||||
get_env_var("CI_PIPELINE_URL"),
|
||||
job_spec.to_dict(hash=ht.dag_hash),
|
||||
)
|
||||
|
||||
# We generated the "spack install ..." command with "--keep-stage", so copy
|
||||
# any logs from the staging directory to artifacts now
|
||||
spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)
|
||||
|
||||
# If the installation succeeded and we're running stand-alone tests for
|
||||
# the package, run them and copy the output. Failures of any kind should
|
||||
# *not* terminate the build process or preclude creating the build cache.
|
||||
broken_tests = (
|
||||
"broken-tests-packages" in gitlab_ci
|
||||
and job_spec.name in gitlab_ci["broken-tests-packages"]
|
||||
)
|
||||
reports_dir = fs.join_path(os.getcwd(), "cdash_report")
|
||||
if args.tests and broken_tests:
|
||||
tty.warn(
|
||||
"Unable to run stand-alone tests since listed in "
|
||||
"gitlab-ci's 'broken-tests-packages'"
|
||||
)
|
||||
if cdash_handler:
|
||||
msg = "Package is listed in gitlab-ci's broken-tests-packages"
|
||||
cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
|
||||
cdash_handler.copy_test_results(reports_dir, job_test_dir)
|
||||
elif args.tests:
|
||||
if install_exit_code == 0:
|
||||
try:
|
||||
# First ensure we will use a reasonable test stage directory
|
||||
stage_root = os.path.dirname(str(job_spec.package.stage.path))
|
||||
test_stage = fs.join_path(stage_root, "spack-standalone-tests")
|
||||
tty.debug("Configuring test_stage to {0}".format(test_stage))
|
||||
config_test_path = "config:test_stage:{0}".format(test_stage)
|
||||
cfg.add(config_test_path, scope=cfg.default_modify_scope())
|
||||
|
||||
# Run the tests, resorting to junit results if not using cdash
|
||||
log_file = (
|
||||
None if cdash_handler else fs.join_path(test_stage, "ci-test-results.xml")
|
||||
)
|
||||
spack_ci.run_standalone_tests(
|
||||
cdash=cdash_handler,
|
||||
job_spec=job_spec,
|
||||
fail_fast=args.fail_fast,
|
||||
log_file=log_file,
|
||||
repro_dir=repro_dir,
|
||||
)
|
||||
|
||||
except Exception as err:
|
||||
# If there is any error, just print a warning.
|
||||
msg = "Error processing stand-alone tests: {0}".format(str(err))
|
||||
tty.warn(msg)
|
||||
|
||||
finally:
|
||||
# Copy the test log/results files
|
||||
spack_ci.copy_test_logs_to_artifacts(test_stage, job_test_dir)
|
||||
if cdash_handler:
|
||||
cdash_handler.copy_test_results(reports_dir, job_test_dir)
|
||||
elif log_file:
|
||||
spack_ci.copy_files_to_artifacts(log_file, job_test_dir)
|
||||
else:
|
||||
tty.warn("No recognized test results reporting option")
|
||||
|
||||
else:
|
||||
tty.warn("Unable to run stand-alone tests due to unsuccessful " "installation")
|
||||
if cdash_handler:
|
||||
msg = "Failed to install the package"
|
||||
cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
|
||||
cdash_handler.copy_test_results(reports_dir, job_test_dir)
|
||||
|
||||
# If the install succeeded, create a buildcache entry for this job spec
|
||||
# and push it to one or more mirrors. If the install did not succeed,
|
||||
# print out some instructions on how to reproduce this build failure
|
||||
# outside of the pipeline environment.
|
||||
if install_exit_code == 0:
|
||||
can_sign = spack_ci.can_sign_binaries()
|
||||
sign_binaries = can_sign and spack_is_pr_pipeline is False
|
||||
|
||||
# Create buildcache in either the main remote mirror, or in the
|
||||
# per-PR mirror, if this is a PR pipeline
|
||||
if buildcache_mirror_url:
|
||||
spack_ci.push_mirror_contents(
|
||||
env, job_spec_json_path, buildcache_mirror_url, sign_binaries
|
||||
)
|
||||
|
||||
# Create another copy of that buildcache in the per-pipeline
|
||||
# temporary storage mirror (this is only done if either
|
||||
# artifacts buildcache is enabled or a temporary storage url
|
||||
# prefix is set)
|
||||
if pipeline_mirror_url:
|
||||
spack_ci.push_mirror_contents(
|
||||
env, job_spec_json_path, pipeline_mirror_url, sign_binaries
|
||||
if buildcache_mirror_url or pipeline_mirror_url:
|
||||
spack_ci.create_buildcache(
|
||||
env=env,
|
||||
buildcache_mirror_url=buildcache_mirror_url,
|
||||
pipeline_mirror_url=pipeline_mirror_url,
|
||||
pr_pipeline=spack_is_pr_pipeline,
|
||||
json_path=job_spec_json_path,
|
||||
)
|
||||
|
||||
# If this is a develop pipeline, check if the spec that we just built is
|
||||
@@ -611,13 +664,11 @@ def ci_rebuild(args):
|
||||
try:
|
||||
web_util.remove_url(broken_spec_path)
|
||||
except Exception as err:
|
||||
# If we got some kind of S3 (access denied or other connection
|
||||
# If there is an S3 error (e.g., access denied or connection
|
||||
# error), the first non boto-specific class in the exception
|
||||
# hierarchy is Exception. Just print a warning and return
|
||||
msg = "Error removing {0} from broken specs list: {1}".format(
|
||||
broken_spec_path, err
|
||||
)
|
||||
tty.warn(msg)
|
||||
# hierarchy is Exception. Just print a warning and return.
|
||||
msg = "Error removing {0} from broken specs list: {1}"
|
||||
tty.warn(msg.format(broken_spec_path, err))
|
||||
|
||||
else:
|
||||
tty.debug("spack install exited non-zero, will not create buildcache")
|
||||
@@ -654,6 +705,10 @@ def ci_rebuild(args):
|
||||
|
||||
|
||||
def ci_reproduce(args):
|
||||
"""Generate instructions for reproducing the spec rebuild job.
|
||||
|
||||
Artifacts of the provided gitlab pipeline rebuild job's URL will be
|
||||
used to derive instructions for reproducing the build locally."""
|
||||
job_url = args.job_url
|
||||
work_dir = args.working_dir
|
||||
|
||||
|
||||
@@ -120,7 +120,7 @@ def _get_scope_and_section(args):
|
||||
path = getattr(args, "path", None)
|
||||
|
||||
# w/no args and an active environment, point to env manifest
|
||||
if not section:
|
||||
if not section and not scope:
|
||||
env = ev.active_environment()
|
||||
if env:
|
||||
scope = env.env_file_config_scope_name()
|
||||
@@ -258,7 +258,7 @@ def config_update(args):
|
||||
cannot_overwrite, skip_system_scope = [], False
|
||||
for scope in updates:
|
||||
cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
|
||||
scope_dir = scope.path
|
||||
scope_dir = os.path.dirname(scope.path)
|
||||
can_be_updated = _can_update_config_file(scope_dir, cfg_file)
|
||||
if not can_be_updated:
|
||||
if scope.name == "system":
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.util.path
|
||||
from spack.error import SpackError
|
||||
|
||||
description = "add a spec to an environment's dev-build information"
|
||||
@@ -52,7 +53,7 @@ def develop(parser, args):
|
||||
# download all dev specs
|
||||
for name, entry in env.dev_specs.items():
|
||||
path = entry.get("path", name)
|
||||
abspath = path if os.path.isabs(path) else os.path.join(env.path, path)
|
||||
abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
|
||||
|
||||
if os.path.exists(abspath):
|
||||
msg = "Skipping developer download of %s" % entry["spec"]
|
||||
@@ -79,11 +80,7 @@ def develop(parser, args):
|
||||
|
||||
# default path is relative path to spec.name
|
||||
path = args.path or spec.name
|
||||
|
||||
# get absolute path to check
|
||||
abspath = path
|
||||
if not os.path.isabs(abspath):
|
||||
abspath = os.path.join(env.path, path)
|
||||
abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
|
||||
|
||||
# clone default: only if the path doesn't exist
|
||||
clone = args.clone
|
||||
|
||||
@@ -198,10 +198,12 @@ def diff(parser, args):
|
||||
if len(args.specs) != 2:
|
||||
tty.die("You must provide two specs to diff.")
|
||||
|
||||
specs = [
|
||||
spack.cmd.disambiguate_spec(spec, env, first=args.load_first)
|
||||
for spec in spack.cmd.parse_specs(args.specs)
|
||||
]
|
||||
specs = []
|
||||
for spec in spack.cmd.parse_specs(args.specs):
|
||||
if spec.concrete:
|
||||
specs.append(spec)
|
||||
else:
|
||||
specs.append(spack.cmd.disambiguate_spec(spec, env, first=args.load_first))
|
||||
|
||||
# Calculate the comparison (c)
|
||||
color = False if args.dump_json else get_color_when()
|
||||
|
||||
@@ -24,6 +24,7 @@
|
||||
import spack.environment as ev
|
||||
import spack.environment.shell
|
||||
import spack.schema.env
|
||||
import spack.tengine
|
||||
import spack.util.string as string
|
||||
from spack.util.environment import EnvironmentModifications
|
||||
|
||||
@@ -641,6 +642,9 @@ def get_target(name):
|
||||
def get_install_target(name):
|
||||
return os.path.join(target_prefix, ".install", name)
|
||||
|
||||
def get_install_deps_target(name):
|
||||
return os.path.join(target_prefix, ".install-deps", name)
|
||||
|
||||
for _, spec in env.concretized_specs():
|
||||
for s in spec.traverse(root=True):
|
||||
hash_to_spec[s.dag_hash()] = s
|
||||
@@ -655,76 +659,38 @@ def get_install_target(name):
|
||||
|
||||
# All package install targets, not just roots.
|
||||
all_install_targets = [get_install_target(h) for h in hash_to_spec.keys()]
|
||||
all_install_deps_targets = [get_install_deps_target(h) for h, _ in hash_to_prereqs.items()]
|
||||
|
||||
buf = six.StringIO()
|
||||
|
||||
buf.write(
|
||||
"""SPACK ?= spack
|
||||
template = spack.tengine.make_environment().get_template(os.path.join("depfile", "Makefile"))
|
||||
|
||||
.PHONY: {} {}
|
||||
|
||||
{}: {}
|
||||
|
||||
{}: {}
|
||||
\t@touch $@
|
||||
|
||||
{}:
|
||||
\t@mkdir -p {}
|
||||
|
||||
{}: | {}
|
||||
\t$(info Installing $(SPEC))
|
||||
\t{}$(SPACK) -e '{}' install $(SPACK_INSTALL_FLAGS) --only-concrete --only=package \
|
||||
--no-add /$(notdir $@) && touch $@
|
||||
|
||||
""".format(
|
||||
get_target("all"),
|
||||
get_target("clean"),
|
||||
get_target("all"),
|
||||
get_target("env"),
|
||||
get_target("env"),
|
||||
" ".join(root_install_targets),
|
||||
get_target("dirs"),
|
||||
get_target(".install"),
|
||||
get_target(".install/%"),
|
||||
get_target("dirs"),
|
||||
"+" if args.jobserver else "",
|
||||
env.path,
|
||||
)
|
||||
)
|
||||
|
||||
# Targets are of the form <prefix>/<name>: [<prefix>/<depname>]...,
|
||||
# The prefix can be an empty string, in that case we don't add the `/`.
|
||||
# The name is currently the dag hash of the spec. In principle it
|
||||
# could be the package name in case of `concretization: together` so
|
||||
# it can be more easily referred to, but for now we don't special case
|
||||
# this.
|
||||
fmt = "{name}{@version}{%compiler}{variants}{arch=architecture}"
|
||||
hash_with_name = [(h, hash_to_spec[h].format(fmt)) for h in hash_to_prereqs.keys()]
|
||||
targets_to_prereqs = [
|
||||
(get_install_deps_target(h), " ".join(prereqs)) for h, prereqs in hash_to_prereqs.items()
|
||||
]
|
||||
|
||||
# Set SPEC for each hash
|
||||
buf.write("# Set the human-readable spec for each target\n")
|
||||
for dag_hash in hash_to_prereqs.keys():
|
||||
formatted_spec = hash_to_spec[dag_hash].format(fmt)
|
||||
buf.write("{}: SPEC = {}\n".format(get_target("%/" + dag_hash), formatted_spec))
|
||||
buf.write("\n")
|
||||
|
||||
# Set install dependencies
|
||||
buf.write("# Install dependencies\n")
|
||||
for parent, children in hash_to_prereqs.items():
|
||||
if not children:
|
||||
continue
|
||||
buf.write("{}: {}\n".format(get_install_target(parent), " ".join(children)))
|
||||
buf.write("\n")
|
||||
|
||||
# Clean target: remove target files but not their folders, cause
|
||||
# --make-target-prefix can be any existing directory we do not control,
|
||||
# including empty string (which means deleting the containing folder
|
||||
# would delete the folder with the Makefile)
|
||||
buf.write(
|
||||
"{}:\n\trm -f -- {} {}\n".format(
|
||||
get_target("clean"), get_target("env"), " ".join(all_install_targets)
|
||||
)
|
||||
rendered = template.render(
|
||||
{
|
||||
"all_target": get_target("all"),
|
||||
"env_target": get_target("env"),
|
||||
"clean_target": get_target("clean"),
|
||||
"all_install_targets": " ".join(all_install_targets),
|
||||
"all_install_deps_targets": " ".join(all_install_deps_targets),
|
||||
"root_install_targets": " ".join(root_install_targets),
|
||||
"dirs_target": get_target("dirs"),
|
||||
"environment": env.path,
|
||||
"install_target": get_target(".install"),
|
||||
"install_deps_target": get_target(".install-deps"),
|
||||
"any_hash_target": get_target("%"),
|
||||
"hash_with_name": hash_with_name,
|
||||
"jobserver_support": "+" if args.jobserver else "",
|
||||
"targets_to_prereqs": targets_to_prereqs,
|
||||
}
|
||||
)
|
||||
|
||||
buf.write(rendered)
|
||||
makefile = buf.getvalue()
|
||||
|
||||
# Finally write to stdout/file.
|
||||
|
||||
@@ -219,7 +219,7 @@ def _collect_and_consume_cray_manifest_files(
|
||||
tty.debug("Reading manifest file: " + path)
|
||||
try:
|
||||
cray_manifest.read(path, not dry_run)
|
||||
except (spack.compilers.UnknownCompilerError, spack.error.SpackError) as e:
|
||||
except spack.error.SpackError as e:
|
||||
if fail_on_error:
|
||||
raise
|
||||
else:
|
||||
|
||||
@@ -203,7 +203,16 @@ def decorator(spec, fmt):
|
||||
return decorator, added, roots, removed
|
||||
|
||||
|
||||
def display_env(env, args, decorator):
|
||||
def display_env(env, args, decorator, results):
|
||||
"""Display extra find output when running in an environment.
|
||||
|
||||
Find in an environment outputs 2 or 3 sections:
|
||||
|
||||
1. Root specs
|
||||
2. Concretized roots (if asked for with -c)
|
||||
3. Installed specs
|
||||
|
||||
"""
|
||||
tty.msg("In environment %s" % env.name)
|
||||
|
||||
if not env.user_specs:
|
||||
@@ -234,6 +243,12 @@ def display_env(env, args, decorator):
|
||||
cmd.display_specs(env.specs_by_hash.values(), args, decorator=decorator)
|
||||
print()
|
||||
|
||||
# Display a header for the installed packages section IF there are installed
|
||||
# packages. If there aren't any, we'll just end up printing "0 installed packages"
|
||||
# later.
|
||||
if results:
|
||||
tty.msg("Installed packages")
|
||||
|
||||
|
||||
def find(parser, args):
|
||||
if args.bootstrap:
|
||||
@@ -286,10 +301,11 @@ def _find(parser, args):
|
||||
else:
|
||||
if not args.format:
|
||||
if env:
|
||||
display_env(env, args, decorator)
|
||||
display_env(env, args, decorator, results)
|
||||
|
||||
cmd.display_specs(results, args, decorator=decorator, all_headers=True)
|
||||
|
||||
# print number of installed packages last (as the list may be long)
|
||||
if sys.stdout.isatty() and args.groups:
|
||||
pkg_type = "loaded" if args.loaded else "installed"
|
||||
spack.cmd.print_how_many_pkgs(results, pkg_type)
|
||||
|
||||
cmd.display_specs(results, args, decorator=decorator, all_headers=True)
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import textwrap
|
||||
@@ -31,10 +32,50 @@
|
||||
level = "short"


# Pass in the value string passed to use-buildcache and get back
# the package and dependencies values.
def parse_use_buildcache(opt):
bc_keys = ["package:", "dependencies:", ""]
bc_values = ["only", "never", "auto"]
kv_list = re.findall("([a-z]+:)?([a-z]+)", opt)

# Verify keys and values
bc_map = {k: v for k, v in kv_list if k in bc_keys and v in bc_values}
if not len(kv_list) == len(bc_map):
tty.error("Unrecognized arguments passed to use-buildcache")
tty.error(
"Expected: --use-buildcache "
"[[auto|only|never],[package:[auto|only|never]],[dependencies:[auto|only|never]]]"
)
exit(1)

for _group in ["package:", "dependencies:"]:
if _group not in bc_map:
if "" in bc_map:
bc_map[_group] = bc_map[""]
else:
bc_map[_group] = "auto"

return bc_map["package:"], bc_map["dependencies:"]


# Determine value of cache flag
def cache_opt(default_opt, use_buildcache):
if use_buildcache == "auto":
return default_opt
elif use_buildcache == "only":
return True
elif use_buildcache == "never":
return False


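Taken together, parse_use_buildcache and cache_opt turn the single --use-buildcache string into per-group booleans. A small hedged sketch of the mapping (the values shown are just an example invocation):

# e.g. --use-buildcache package:only,dependencies:never
pkg_use_bc, dep_use_bc = parse_use_buildcache("package:only,dependencies:never")
# pkg_use_bc == "only", dep_use_bc == "never"

# With the command-line defaults use_cache=True, cache_only=False:
package_cache_only = cache_opt(False, pkg_use_bc)      # True  ("only" => cache only)
dependencies_use_cache = cache_opt(True, dep_use_bc)   # False ("never" => no cache)

# A bare value applies to both groups, and an omitted group falls back to
# "auto", which simply keeps the --use-cache / --cache-only defaults.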
def install_kwargs_from_args(args):
|
||||
"""Translate command line arguments into a dictionary that will be passed
|
||||
to the package installer.
|
||||
"""
|
||||
|
||||
pkg_use_bc, dep_use_bc = parse_use_buildcache(args.use_buildcache)
|
||||
|
||||
return {
|
||||
"fail_fast": args.fail_fast,
|
||||
"keep_prefix": args.keep_prefix,
|
||||
@@ -44,8 +85,10 @@ def install_kwargs_from_args(args):
|
||||
"verbose": args.verbose or args.install_verbose,
|
||||
"fake": args.fake,
|
||||
"dirty": args.dirty,
|
||||
"use_cache": args.use_cache,
|
||||
"cache_only": args.cache_only,
|
||||
"package_use_cache": cache_opt(args.use_cache, pkg_use_bc),
|
||||
"package_cache_only": cache_opt(args.cache_only, pkg_use_bc),
|
||||
"dependencies_use_cache": cache_opt(args.use_cache, dep_use_bc),
|
||||
"dependencies_cache_only": cache_opt(args.cache_only, dep_use_bc),
|
||||
"include_build_deps": args.include_build_deps,
|
||||
"explicit": True, # Use true as a default for install command
|
||||
"stop_at": args.until,
|
||||
@@ -123,6 +166,18 @@ def setup_parser(subparser):
|
||||
default=False,
|
||||
help="only install package from binary mirrors",
|
||||
)
|
||||
cache_group.add_argument(
|
||||
"--use-buildcache",
|
||||
dest="use_buildcache",
|
||||
default="package:auto,dependencies:auto",
|
||||
metavar="[{auto,only,never},][package:{auto,only,never},][dependencies:{auto,only,never}]",
|
||||
help="""select the mode of buildcache for the 'package' and 'dependencies'.
|
||||
Default: package:auto,dependencies:auto
|
||||
- `auto` behaves like --use-cache
|
||||
- `only` behaves like --cache-only
|
||||
- `never` behaves like --no-cache
|
||||
""",
|
||||
)
|
||||
|
||||
subparser.add_argument(
|
||||
"--include-build-deps",
|
||||
@@ -236,6 +291,7 @@ def install_specs(specs, install_kwargs, cli_args):
|
||||
except spack.build_environment.InstallError as e:
|
||||
if cli_args.show_log_on_error:
|
||||
e.print_context()
|
||||
assert e.pkg, "Expected InstallError to include the associated package"
|
||||
if not os.path.exists(e.pkg.build_log_path):
|
||||
tty.error("'spack install' created no log.")
|
||||
else:
|
||||
|
||||
@@ -54,7 +54,6 @@
|
||||
r"^share/spack/.*\.fish$",
|
||||
r"^share/spack/qa/run-[^/]*$",
|
||||
r"^share/spack/bash/spack-completion.in$",
|
||||
r"^share/spack/templates/misc/coconcretization.pyt$",
|
||||
# action workflows
|
||||
r"^.github/actions/.*\.py$",
|
||||
# all packages
|
||||
|
||||
@@ -16,6 +16,7 @@
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.tty.colify import colify
|
||||
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.dependency
|
||||
import spack.repo
|
||||
from spack.version import VersionList
|
||||
@@ -72,6 +73,7 @@ def setup_parser(subparser):
|
||||
default=False,
|
||||
help="include virtual packages in list",
|
||||
)
|
||||
arguments.add_common_arguments(subparser, ["tags"])
|
||||
|
||||
|
||||
def filter_by_name(pkgs, args):
|
||||
@@ -120,9 +122,9 @@ def match(p, f):
|
||||
@formatter
|
||||
def name_only(pkgs, out):
|
||||
indent = 0
|
||||
if out.isatty():
|
||||
tty.msg("%d packages." % len(pkgs))
|
||||
colify(pkgs, indent=indent, output=out)
|
||||
if out.isatty():
|
||||
tty.msg("%d packages" % len(pkgs))
|
||||
|
||||
|
||||
def github_url(pkg):
|
||||
@@ -306,6 +308,11 @@ def list(parser, args):
|
||||
# Filter the set appropriately
|
||||
sorted_packages = filter_by_name(pkgs, args)
|
||||
|
||||
# If tags have been specified on the command line, filter by tags
|
||||
if args.tags:
|
||||
packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
|
||||
sorted_packages = [p for p in sorted_packages if p in packages_with_tags]
|
||||
|
||||
if args.update:
|
||||
# change output stream if user asked for update
|
||||
if os.path.exists(args.update):
|
||||
|
||||
@@ -30,6 +30,12 @@ def setup_parser(subparser):
|
||||
help="print the Python version number and exit",
|
||||
)
|
||||
subparser.add_argument("-c", dest="python_command", help="command to execute")
|
||||
subparser.add_argument(
|
||||
"-u",
|
||||
dest="unbuffered",
|
||||
action="store_true",
|
||||
help="for compatibility with xdist, do not use without adding -u to the interpreter",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-i",
|
||||
dest="python_interpreter",
|
||||
|
||||
@@ -29,17 +29,14 @@
|
||||
level = "long"
|
||||
|
||||
|
||||
def first_line(docstring):
|
||||
"""Return the first line of the docstring."""
|
||||
return docstring.split("\n")[0]
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="test_command")
|
||||
|
||||
# Run
|
||||
run_parser = sp.add_parser(
|
||||
"run", description=test_run.__doc__, help=first_line(test_run.__doc__)
|
||||
"run",
|
||||
description=test_run.__doc__,
|
||||
help=spack.cmd.first_line(test_run.__doc__),
|
||||
)
|
||||
|
||||
alias_help_msg = "Provide an alias for this test-suite"
|
||||
@@ -83,7 +80,9 @@ def setup_parser(subparser):
|
||||
|
||||
# List
|
||||
list_parser = sp.add_parser(
|
||||
"list", description=test_list.__doc__, help=first_line(test_list.__doc__)
|
||||
"list",
|
||||
description=test_list.__doc__,
|
||||
help=spack.cmd.first_line(test_list.__doc__),
|
||||
)
|
||||
list_parser.add_argument(
|
||||
"-a",
|
||||
@@ -97,7 +96,9 @@ def setup_parser(subparser):
|
||||
|
||||
# Find
|
||||
find_parser = sp.add_parser(
|
||||
"find", description=test_find.__doc__, help=first_line(test_find.__doc__)
|
||||
"find",
|
||||
description=test_find.__doc__,
|
||||
help=spack.cmd.first_line(test_find.__doc__),
|
||||
)
|
||||
find_parser.add_argument(
|
||||
"filter",
|
||||
@@ -107,7 +108,9 @@ def setup_parser(subparser):
|
||||
|
||||
# Status
|
||||
status_parser = sp.add_parser(
|
||||
"status", description=test_status.__doc__, help=first_line(test_status.__doc__)
|
||||
"status",
|
||||
description=test_status.__doc__,
|
||||
help=spack.cmd.first_line(test_status.__doc__),
|
||||
)
|
||||
status_parser.add_argument(
|
||||
"names", nargs=argparse.REMAINDER, help="Test suites for which to print status"
|
||||
@@ -115,7 +118,9 @@ def setup_parser(subparser):
|
||||
|
||||
# Results
|
||||
results_parser = sp.add_parser(
|
||||
"results", description=test_results.__doc__, help=first_line(test_results.__doc__)
|
||||
"results",
|
||||
description=test_results.__doc__,
|
||||
help=spack.cmd.first_line(test_results.__doc__),
|
||||
)
|
||||
results_parser.add_argument(
|
||||
"-l", "--logs", action="store_true", help="print the test log for each matching package"
|
||||
@@ -142,7 +147,9 @@ def setup_parser(subparser):
|
||||
|
||||
# Remove
|
||||
remove_parser = sp.add_parser(
|
||||
"remove", description=test_remove.__doc__, help=first_line(test_remove.__doc__)
|
||||
"remove",
|
||||
description=test_remove.__doc__,
|
||||
help=spack.cmd.first_line(test_remove.__doc__),
|
||||
)
|
||||
arguments.add_common_arguments(remove_parser, ["yes_to_all"])
|
||||
remove_parser.add_argument(
|
||||
@@ -191,6 +198,16 @@ def test_run(args):
|
||||
matching = spack.store.db.query_local(spec, hashes=hashes)
|
||||
if spec and not matching:
|
||||
tty.warn("No installed packages match spec %s" % spec)
|
||||
"""
|
||||
TODO: Need to write out a log message and/or CDASH Testing
|
||||
output that package not installed IF continue to process
|
||||
these issues here.
|
||||
|
||||
if args.log_format:
|
||||
# Proceed with the spec assuming the test process
|
||||
# to ensure report package as skipped (e.g., for CI)
|
||||
specs_to_test.append(spec)
|
||||
"""
|
||||
specs_to_test.extend(matching)
|
||||
|
||||
# test_stage_dir
|
||||
|
||||
@@ -35,7 +35,6 @@
|
||||
"""
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.link_tree import MergeConflictError
|
||||
from llnl.util.tty.color import colorize
|
||||
|
||||
import spack.cmd
|
||||
import spack.environment as ev
|
||||
@@ -66,16 +65,7 @@ def squash(matching_specs):
|
||||
tty.die("Spec matches no installed packages.")
|
||||
|
||||
matching_in_view = [ms for ms in matching_specs if ms in view_specs]
|
||||
|
||||
if len(matching_in_view) > 1:
|
||||
spec_format = "{name}{@version}{%compiler}{arch=architecture}"
|
||||
args = ["Spec matches multiple packages.", "Matching packages:"]
|
||||
args += [
|
||||
colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(spec_format)
|
||||
for s in matching_in_view
|
||||
]
|
||||
args += ["Use a more specific spec."]
|
||||
tty.die(*args)
|
||||
spack.cmd.ensure_single_spec_or_die("Spec", matching_in_view)
|
||||
|
||||
return matching_in_view[0] if matching_in_view else matching_specs[0]
|
||||
|
||||
|
||||
@@ -537,6 +537,14 @@ def get_real_version(self):
|
||||
)
|
||||
return self.extract_version_from_output(output)
|
||||
|
||||
@property
|
||||
def prefix(self):
|
||||
"""Query the compiler for its install prefix. This is the install
|
||||
path as reported by the compiler. Note that paths for cc, cxx, etc
|
||||
are not enough to find the install prefix of the compiler, since
|
||||
they can be symlinks, wrappers, or filenames instead of absolute paths."""
|
||||
raise NotImplementedError("prefix is not implemented for this compiler")
|
||||
|
||||
#
|
||||
# Compiler classes have methods for querying the version of
|
||||
# specific compiler executables. This is used when discovering compilers.
|
||||
|
||||
@@ -6,8 +6,11 @@
|
||||
import os
|
||||
import re
|
||||
|
||||
from llnl.util.filesystem import ancestor
|
||||
|
||||
import spack.compiler
|
||||
import spack.compilers.apple_clang as apple_clang
|
||||
import spack.util.executable
|
||||
from spack.version import ver
|
||||
|
||||
|
||||
@@ -196,3 +199,21 @@ def f77_version(cls, f77):
@property
def stdcxx_libs(self):
return ("-lstdc++",)

@property
def prefix(self):
# GCC reports its install prefix when running ``-print-search-dirs``
# on the first line ``install: <prefix>``.
cc = spack.util.executable.Executable(self.cc)
with self.compiler_environment():
gcc_output = cc("-print-search-dirs", output=str, error=str)

for line in gcc_output.splitlines():
if line.startswith("install:"):
gcc_prefix = line.split(":")[1].strip()
# Go from <prefix>/lib/gcc/<triplet>/<version>/ to <prefix>
return ancestor(gcc_prefix, 4)

raise RuntimeError(
"could not find install prefix of GCC from output:\n\t{}".format(gcc_output)
)

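The new prefix property works backwards from GCC's own search-dirs report. A hedged sketch of the parsing, with a made-up install line standing in for real compiler output:

# Hypothetical compiler output; real `gcc -print-search-dirs` starts with an
# "install:" line naming the internal install directory.
gcc_output = "install: /opt/gcc-12.1.0/lib/gcc/x86_64-pc-linux-gnu/12.1.0/\nprograms: =..."

for line in gcc_output.splitlines():
    if line.startswith("install:"):
        install_dir = line.split(":")[1].strip()
        # ancestor(install_dir, 4) walks back up lib/gcc/<triplet>/<version>/,
        # which is how the property above recovers /opt/gcc-12.1.0.
        print(install_dir)
        break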
@@ -17,7 +17,6 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import functools
|
||||
import os.path
|
||||
import platform
|
||||
import tempfile
|
||||
from contextlib import contextmanager
|
||||
@@ -25,7 +24,6 @@
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.lang
|
||||
import llnl.util.tty as tty
|
||||
|
||||
@@ -38,6 +36,7 @@
|
||||
import spack.spec
|
||||
import spack.target
|
||||
import spack.tengine
|
||||
import spack.util.path
|
||||
import spack.variant as vt
|
||||
from spack.config import config
|
||||
from spack.package_prefs import PackagePrefs, is_spec_buildable, spec_externals
|
||||
@@ -91,7 +90,7 @@ def concretize_develop(self, spec):
|
||||
if not dev_info:
|
||||
return False
|
||||
|
||||
path = os.path.normpath(os.path.join(env.path, dev_info["path"]))
|
||||
path = spack.util.path.canonicalize_path(dev_info["path"], default_wd=env.path)
|
||||
|
||||
if "dev_path" in spec.variants:
|
||||
assert spec.variants["dev_path"].value == path
|
||||
@@ -752,37 +751,20 @@ def _concretize_specs_together_new(*abstract_specs, **kwargs):
|
||||
|
||||
|
||||
def _concretize_specs_together_original(*abstract_specs, **kwargs):
|
||||
def make_concretization_repository(abstract_specs):
|
||||
"""Returns the path to a temporary repository created to contain
|
||||
a fake package that depends on all of the abstract specs.
|
||||
"""
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
repo_path, _ = spack.repo.create_repo(tmpdir)
|
||||
|
||||
debug_msg = "[CONCRETIZATION]: Creating helper repository in {0}"
|
||||
tty.debug(debug_msg.format(repo_path))
|
||||
|
||||
pkg_dir = os.path.join(repo_path, "packages", "concretizationroot")
|
||||
fs.mkdirp(pkg_dir)
|
||||
environment = spack.tengine.make_environment()
|
||||
template = environment.get_template("misc/coconcretization.pyt")
|
||||
|
||||
# Split recursive specs, as it seems the concretizer has issue
|
||||
# respecting conditions on dependents expressed like
|
||||
# depends_on('foo ^bar@1.0'), see issue #11160
|
||||
split_specs = [
|
||||
dep.copy(deps=False) for spec in abstract_specs for dep in spec.traverse(root=True)
|
||||
]
|
||||
|
||||
with open(os.path.join(pkg_dir, "package.py"), "w") as f:
|
||||
f.write(template.render(specs=[str(s) for s in split_specs]))
|
||||
|
||||
return spack.repo.Repo(repo_path)
|
||||
|
||||
abstract_specs = [spack.spec.Spec(s) for s in abstract_specs]
|
||||
concretization_repository = make_concretization_repository(abstract_specs)
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
builder = spack.repo.MockRepositoryBuilder(tmpdir)
|
||||
# Split recursive specs, as it seems the concretizer has issue
|
||||
# respecting conditions on dependents expressed like
|
||||
# depends_on('foo ^bar@1.0'), see issue #11160
|
||||
split_specs = [
|
||||
dep.copy(deps=False) for spec1 in abstract_specs for dep in spec1.traverse(root=True)
|
||||
]
|
||||
builder.add_package(
|
||||
"concretizationroot", dependencies=[(str(x), None, None) for x in split_specs]
|
||||
)
|
||||
|
||||
with spack.repo.additional_repository(concretization_repository):
|
||||
with spack.repo.use_repositories(builder.root, override=False):
|
||||
# Spec from a helper package that depends on all the abstract_specs
|
||||
concretization_root = spack.spec.Spec("concretizationroot")
|
||||
concretization_root.concretize(tests=kwargs.get("tests", False))
|
||||
|
||||
@@ -64,6 +64,7 @@
|
||||
|
||||
# Hacked yaml for configuration files preserves line numbers.
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.util.web as web_util
|
||||
from spack.error import SpackError
|
||||
from spack.util.cpus import cpus_available
|
||||
|
||||
@@ -408,28 +409,22 @@ def __init__(self, *scopes):
|
||||
@_config_mutator
|
||||
def push_scope(self, scope):
|
||||
"""Add a higher precedence scope to the Configuration."""
|
||||
cmd_line_scope = None
|
||||
if self.scopes:
|
||||
highest_precedence_scope = list(self.scopes.values())[-1]
|
||||
if highest_precedence_scope.name == "command_line":
|
||||
# If the command-line scope is present, it should always
|
||||
# be the scope of highest precedence
|
||||
cmd_line_scope = self.pop_scope()
|
||||
|
||||
tty.debug("[CONFIGURATION: PUSH SCOPE]: {}".format(str(scope)), level=2)
|
||||
self.scopes[scope.name] = scope
|
||||
if cmd_line_scope:
|
||||
self.scopes["command_line"] = cmd_line_scope
|
||||
|
||||
@_config_mutator
|
||||
def pop_scope(self):
|
||||
"""Remove the highest precedence scope and return it."""
|
||||
name, scope = self.scopes.popitem(last=True)
|
||||
tty.debug("[CONFIGURATION: POP SCOPE]: {}".format(str(scope)), level=2)
|
||||
return scope
|
||||
|
||||
@_config_mutator
|
||||
def remove_scope(self, scope_name):
|
||||
"""Remove scope by name; has no effect when ``scope_name`` does not exist"""
|
||||
return self.scopes.pop(scope_name, None)
|
||||
scope = self.scopes.pop(scope_name, None)
|
||||
tty.debug("[CONFIGURATION: POP SCOPE]: {}".format(str(scope)), level=2)
|
||||
return scope
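The reworked push_scope above keeps the command_line scope pinned at highest precedence: if it is currently on top it is popped, the new scope is inserted, and the command-line scope is pushed back. A minimal sketch of that ordering, using a plain OrderedDict in place of the real Configuration object (scope names are illustrative):

from collections import OrderedDict

scopes = OrderedDict([("defaults", "..."), ("site", "..."), ("command_line", "...")])


def push_scope_sketch(scopes, name, scope="new scope"):
    cmd_line = scopes.pop("command_line", None)   # lift the pinned top scope
    scopes[name] = scope                          # new scope goes next-to-top
    if cmd_line is not None:
        scopes["command_line"] = cmd_line         # command line stays highest
    return scopes


print(list(push_scope_sketch(scopes, "env:myenv")))
# ['defaults', 'site', 'env:myenv', 'command_line']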
@property
|
||||
def file_scopes(self):
|
||||
@@ -994,18 +989,19 @@ def read_config_file(filename, schema=None):
|
||||
# schema when it's not necessary) while allowing us to validate against a
|
||||
# known schema when the top-level key could be incorrect.
|
||||
|
||||
# Ignore nonexisting files.
|
||||
if not os.path.exists(filename):
|
||||
# Ignore nonexistent files.
|
||||
tty.debug("Skipping nonexistent config path {0}".format(filename))
|
||||
return None
|
||||
|
||||
elif not os.path.isfile(filename):
|
||||
raise ConfigFileError("Invalid configuration. %s exists but is not a file." % filename)
|
||||
|
||||
elif not os.access(filename, os.R_OK):
|
||||
raise ConfigFileError("Config file is not readable: %s" % filename)
|
||||
raise ConfigFileError("Config file is not readable: {0}".format(filename))
|
||||
|
||||
try:
|
||||
tty.debug("Reading config file %s" % filename)
|
||||
tty.debug("Reading config from file {0}".format(filename))
|
||||
with open(filename) as f:
|
||||
data = syaml.load_config(f)
|
||||
|
||||
@@ -1020,7 +1016,15 @@ def read_config_file(filename, schema=None):
|
||||
raise ConfigFileError("Config file is empty or is not a valid YAML dict: %s" % filename)
|
||||
|
||||
except MarkedYAMLError as e:
|
||||
raise ConfigFileError("Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))
|
||||
msg = "Error parsing yaml"
|
||||
mark = e.context_mark if e.context_mark else e.problem_mark
|
||||
if mark:
|
||||
line, column = mark.line, mark.column
|
||||
msg += ": near %s, %s, %s" % (mark.name, str(line), str(column))
|
||||
else:
|
||||
msg += ": %s" % (filename)
|
||||
msg += ": %s" % (e.problem)
|
||||
raise ConfigFileError(msg)
|
||||
|
||||
except IOError as e:
|
||||
raise ConfigFileError("Error reading configuration file %s: %s" % (filename, str(e)))
|
||||
@@ -1296,6 +1300,95 @@ def _config_from(scopes_or_paths):
|
||||
return configuration
|
||||
|
||||
|
||||
def raw_github_gitlab_url(url):
"""Transform a github URL to the raw form to avoid undesirable html.

Args:
url: url to be converted to raw form

Returns: (str) raw github/gitlab url or the original url
"""
# Note we rely on GitHub to redirect the 'raw' URL returned here to the
# actual URL under https://raw.githubusercontent.com/ with '/blob'
# (or '/blame', if present) removed.
if "github" in url or "gitlab" in url:
return url.replace("/blob/", "/raw/")

return url


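raw_github_gitlab_url is a simple string rewrite; a quick hedged example of what it is meant to do (the URL is made up):

# Hypothetical URL, shown only to illustrate the /blob/ -> /raw/ rewrite.
url = "https://github.com/example-org/example-repo/blob/develop/etc/spack/packages.yaml"
print(raw_github_gitlab_url(url))
# https://github.com/example-org/example-repo/raw/develop/etc/spack/packages.yaml
# Non-GitHub/GitLab URLs are returned unchanged.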
def collect_urls(base_url):
|
||||
"""Return a list of configuration URLs.
|
||||
|
||||
Arguments:
|
||||
base_url (str): URL for a configuration (yaml) file or a directory
|
||||
containing yaml file(s)
|
||||
|
||||
Returns: (list) list of configuration file(s) or empty list if none
|
||||
"""
|
||||
if not base_url:
|
||||
return []
|
||||
|
||||
extension = ".yaml"
|
||||
|
||||
if base_url.endswith(extension):
|
||||
return [base_url]
|
||||
|
||||
# Collect configuration URLs if the base_url is a "directory".
|
||||
_, links = web_util.spider(base_url, 0)
|
||||
return [link for link in links if link.endswith(extension)]
|
||||
|
||||
|
||||
def fetch_remote_configs(url, dest_dir, skip_existing=True):
|
||||
"""Retrieve configuration file(s) at the specified URL.
|
||||
|
||||
Arguments:
|
||||
url (str): URL for a configuration (yaml) file or a directory containing
|
||||
yaml file(s)
|
||||
dest_dir (str): destination directory
|
||||
skip_existing (bool): Skip files that already exist in dest_dir if
|
||||
``True``; otherwise, replace those files
|
||||
|
||||
Returns: (str) path to the corresponding file if URL is or contains a
|
||||
single file and it is the only file in the destination directory or
|
||||
the root (dest_dir) directory if multiple configuration files exist
|
||||
or are retrieved.
|
||||
"""
|
||||
|
||||
def _fetch_file(url):
|
||||
raw = raw_github_gitlab_url(url)
|
||||
tty.debug("Reading config from url {0}".format(raw))
|
||||
return web_util.fetch_url_text(raw, dest_dir=dest_dir)
|
||||
|
||||
if not url:
|
||||
raise ConfigFileError("Cannot retrieve configuration without a URL")
|
||||
|
||||
# Return the local path to the cached configuration file OR to the
|
||||
# directory containing the cached configuration files.
|
||||
config_links = collect_urls(url)
|
||||
existing_files = os.listdir(dest_dir) if os.path.isdir(dest_dir) else []
|
||||
|
||||
paths = []
|
||||
for config_url in config_links:
|
||||
basename = os.path.basename(config_url)
|
||||
if skip_existing and basename in existing_files:
|
||||
tty.warn(
|
||||
"Will not fetch configuration from {0} since a version already"
|
||||
"exists in {1}".format(config_url, dest_dir)
|
||||
)
|
||||
path = os.path.join(dest_dir, basename)
|
||||
else:
|
||||
path = _fetch_file(config_url)
|
||||
|
||||
if path:
|
||||
paths.append(path)
|
||||
|
||||
if paths:
|
||||
return dest_dir if len(paths) > 1 else paths[0]
|
||||
|
||||
raise ConfigFileError("Cannot retrieve configuration (yaml) from {0}".format(url))
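collect_urls and fetch_remote_configs together let an environment point at configuration living behind a URL. A hedged usage sketch (URL and destination are placeholders, not paths from this changeset):

# A single .yaml URL is fetched as one file; a "directory" URL is spidered
# for .yaml links and each one is downloaded into dest_dir.
dest = "/tmp/spack-staged-config"  # placeholder destination
path = fetch_remote_configs(
    "https://github.com/example-org/example-configs/blob/main/packages.yaml",
    dest,
    skip_existing=True,  # keep any already staged copy instead of re-fetching
)
# For a single file, `path` is the local file; for several files it is `dest`.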
class ConfigError(SpackError):
|
||||
"""Superclass for all Spack config related errors."""
|
||||
|
||||
|
||||
@@ -4,8 +4,10 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import json
|
||||
import sys
|
||||
|
||||
import jsonschema
|
||||
import jsonschema.exceptions
|
||||
import six
|
||||
|
||||
import llnl.util.tty as tty
|
||||
@@ -161,10 +163,21 @@ def entries_to_specs(entries):
|
||||
|
||||
|
||||
def read(path, apply_updates):
|
||||
with open(path, "r") as json_file:
|
||||
json_data = json.load(json_file)
|
||||
if sys.version_info >= (3, 0):
|
||||
decode_exception_type = json.decoder.JSONDecodeError
|
||||
else:
|
||||
decode_exception_type = ValueError
|
||||
|
||||
jsonschema.validate(json_data, manifest_schema)
|
||||
try:
|
||||
with open(path, "r") as json_file:
|
||||
json_data = json.load(json_file)
|
||||
|
||||
jsonschema.validate(json_data, manifest_schema)
|
||||
except (jsonschema.exceptions.ValidationError, decode_exception_type) as e:
|
||||
raise six.raise_from(
|
||||
ManifestValidationError("error parsing manifest JSON:", str(e)),
|
||||
e,
|
||||
)
|
||||
|
||||
specs = entries_to_specs(json_data["specs"])
|
||||
tty.debug("{0}: {1} specs read from manifest".format(path, str(len(specs))))
|
||||
@@ -179,3 +192,8 @@ def read(path, apply_updates):
|
||||
if apply_updates:
|
||||
for spec in specs.values():
|
||||
spack.store.db.add(spec, directory_layout=None)
|
||||
|
||||
|
||||
class ManifestValidationError(spack.error.SpackError):
|
||||
def __init__(self, msg, long_msg=None):
|
||||
super(ManifestValidationError, self).__init__(msg, long_msg)
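The manifest reader above now tolerates both malformed JSON and schema violations, picking the decode exception type up front because Python 2 only raises ValueError. A self-contained sketch of the same pattern, with RuntimeError standing in for ManifestValidationError:

import json
import sys

import six

# Python 2's json raises ValueError; Python 3 has the more specific
# json.decoder.JSONDecodeError, so choose the exception type once.
decode_exception_type = (
    json.decoder.JSONDecodeError if sys.version_info >= (3, 0) else ValueError
)

try:
    data = json.loads("{ not valid json }")
except decode_exception_type as e:
    # Chain the original error onto a friendlier one, as the reader does.
    six.raise_from(RuntimeError("error parsing manifest JSON: " + str(e)), e)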
@@ -48,7 +48,10 @@
|
||||
import spack.store
|
||||
import spack.util.lock as lk
|
||||
import spack.util.spack_json as sjson
|
||||
from spack.directory_layout import DirectoryLayoutError
|
||||
from spack.directory_layout import (
|
||||
DirectoryLayoutError,
|
||||
InconsistentInstallDirectoryError,
|
||||
)
|
||||
from spack.error import SpackError
|
||||
from spack.filesystem_view import YamlFilesystemView
|
||||
from spack.util.crypto import bit_length
|
||||
@@ -1063,7 +1066,14 @@ def _read(self):
|
||||
elif self.is_upstream:
|
||||
tty.warn("upstream not found: {0}".format(self._index_path))
|
||||
|
||||
def _add(self, spec, directory_layout=None, explicit=False, installation_time=None):
|
||||
def _add(
|
||||
self,
|
||||
spec,
|
||||
directory_layout=None,
|
||||
explicit=False,
|
||||
installation_time=None,
|
||||
allow_missing=False,
|
||||
):
|
||||
"""Add an install record for this spec to the database.
|
||||
|
||||
Assumes spec is installed in ``layout.path_for_spec(spec)``.
|
||||
@@ -1074,19 +1084,18 @@ def _add(self, spec, directory_layout=None, explicit=False, installation_time=No
|
||||
Args:
|
||||
spec: spec to be added
|
||||
directory_layout: layout of the spec installation
|
||||
**kwargs:
|
||||
explicit:
|
||||
Possible values: True, False, any
|
||||
|
||||
explicit
|
||||
Possible values: True, False, any
|
||||
|
||||
A spec that was installed following a specific user
|
||||
request is marked as explicit. If instead it was
|
||||
pulled-in as a dependency of a user requested spec
|
||||
it's considered implicit.
|
||||
|
||||
installation_time
|
||||
Date and time of installation
|
||||
A spec that was installed following a specific user
|
||||
request is marked as explicit. If instead it was
|
||||
pulled-in as a dependency of a user requested spec
|
||||
it's considered implicit.
|
||||
|
||||
installation_time:
|
||||
Date and time of installation
|
||||
allow_missing: if True, don't warn when the installation is not found on disk.
|
||||
This is useful when installing specs without build deps.
|
||||
"""
|
||||
if not spec.concrete:
|
||||
raise NonConcreteSpecAddError("Specs added to DB must be concrete.")
|
||||
@@ -1100,11 +1109,22 @@ def _add(self, spec, directory_layout=None, explicit=False, installation_time=No
|
||||
# Retrieve optional arguments
|
||||
installation_time = installation_time or _now()
|
||||
|
||||
for dep in spec.dependencies(deptype=_tracked_deps):
|
||||
dkey = dep.dag_hash()
|
||||
if dkey not in self._data:
|
||||
extra_args = {"explicit": False, "installation_time": installation_time}
|
||||
self._add(dep, directory_layout, **extra_args)
|
||||
for edge in spec.edges_to_dependencies(deptype=_tracked_deps):
|
||||
if edge.spec.dag_hash() in self._data:
|
||||
continue
|
||||
# allow missing build-only deps. This prevents excessive
|
||||
# warnings when a spec is installed, and its build dep
|
||||
# is missing a build dep; there's no need to install the
|
||||
# build dep's build dep first, and there's no need to warn
|
||||
# about it missing.
|
||||
dep_allow_missing = allow_missing or edge.deptypes == ("build",)
|
||||
self._add(
|
||||
edge.spec,
|
||||
directory_layout,
|
||||
explicit=False,
|
||||
installation_time=installation_time,
|
||||
allow_missing=dep_allow_missing,
|
||||
)
|
||||
|
||||
# Make sure the directory layout agrees whether the spec is installed
|
||||
if not spec.external and directory_layout:
|
||||
@@ -1115,13 +1135,14 @@ def _add(self, spec, directory_layout=None, explicit=False, installation_time=No
|
||||
installed = True
|
||||
self._installed_prefixes.add(path)
|
||||
except DirectoryLayoutError as e:
|
||||
msg = (
|
||||
"{0} is being {1} in the database with prefix {2}, "
|
||||
"but this directory does not contain an installation of "
|
||||
"the spec, due to: {3}"
|
||||
)
|
||||
action = "updated" if key in self._data else "registered"
|
||||
tty.warn(msg.format(spec.short_spec, action, path, str(e)))
|
||||
if not (allow_missing and isinstance(e, InconsistentInstallDirectoryError)):
|
||||
msg = (
|
||||
"{0} is being {1} in the database with prefix {2}, "
|
||||
"but this directory does not contain an installation of "
|
||||
"the spec, due to: {3}"
|
||||
)
|
||||
action = "updated" if key in self._data else "registered"
|
||||
tty.warn(msg.format(spec.short_spec, action, path, str(e)))
|
||||
elif spec.external_path:
|
||||
path = spec.external_path
|
||||
installed = True
|
||||
|
||||
@@ -17,7 +17,6 @@
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import rename
|
||||
from llnl.util.lang import dedupe
|
||||
from llnl.util.symlink import symlink
|
||||
|
||||
@@ -593,7 +592,7 @@ def regenerate(self, concretized_root_specs):
|
||||
symlink(new_root, tmp_symlink_name)
|
||||
|
||||
# mv symlink atomically over root symlink to old_root
|
||||
rename(tmp_symlink_name, self.root)
|
||||
fs.rename(tmp_symlink_name, self.root)
|
||||
except Exception as e:
|
||||
# Clean up new view and temporary symlink on any failure.
|
||||
try:
|
||||
@@ -897,6 +896,11 @@ def repos_path(self):
|
||||
def log_path(self):
|
||||
return os.path.join(self.path, env_subdir_name, "logs")
|
||||
|
||||
@property
|
||||
def config_stage_dir(self):
|
||||
"""Directory for any staged configuration file(s)."""
|
||||
return os.path.join(self.env_subdir_path, "config")
|
||||
|
||||
@property
|
||||
def view_path_default(self):
|
||||
# default path for environment views
|
||||
@@ -928,6 +932,47 @@ def included_config_scopes(self):
|
||||
# allow paths to contain spack config/environment variables, etc.
|
||||
config_path = substitute_path_variables(config_path)
|
||||
|
||||
# strip file URL prefix, if needed, to avoid unnecessary remote
|
||||
# config processing for local files
|
||||
config_path = config_path.replace("file://", "")
|
||||
|
||||
if not os.path.exists(config_path):
|
||||
# Stage any remote configuration file(s)
|
||||
if spack.util.url.is_url_format(config_path):
|
||||
staged_configs = (
|
||||
os.listdir(self.config_stage_dir)
|
||||
if os.path.exists(self.config_stage_dir)
|
||||
else []
|
||||
)
|
||||
basename = os.path.basename(config_path)
|
||||
if basename in staged_configs:
|
||||
# Do NOT re-stage configuration files over existing
|
||||
# ones with the same name since there is a risk of
|
||||
# losing changes (e.g., from 'spack config update').
|
||||
tty.warn(
|
||||
"Will not re-stage configuration from {0} to avoid "
|
||||
"losing changes to the already staged file of the "
|
||||
"same name.".format(config_path)
|
||||
)
|
||||
|
||||
# Recognize the configuration stage directory
|
||||
# is flattened to ensure a single copy of each
|
||||
# configuration file.
|
||||
config_path = self.config_stage_dir
|
||||
if basename.endswith(".yaml"):
|
||||
config_path = os.path.join(config_path, basename)
|
||||
else:
|
||||
staged_path = spack.config.fetch_remote_configs(
|
||||
config_path,
|
||||
self.config_stage_dir,
|
||||
skip_existing=True,
|
||||
)
|
||||
if not staged_path:
|
||||
raise SpackEnvironmentError(
|
||||
"Unable to fetch remote configuration {0}".format(config_path)
|
||||
)
|
||||
config_path = staged_path
|
||||
|
||||
# treat relative paths as relative to the environment
|
||||
if not os.path.isabs(config_path):
|
||||
config_path = os.path.join(self.path, config_path)
|
||||
@@ -936,10 +981,14 @@ def included_config_scopes(self):
|
||||
if os.path.isdir(config_path):
|
||||
# directories are treated as regular ConfigScopes
|
||||
config_name = "env:%s:%s" % (self.name, os.path.basename(config_path))
|
||||
tty.debug("Creating ConfigScope {0} for '{1}'".format(config_name, config_path))
|
||||
scope = spack.config.ConfigScope(config_name, config_path)
|
||||
elif os.path.exists(config_path):
|
||||
# files are assumed to be SingleFileScopes
|
||||
config_name = "env:%s:%s" % (self.name, config_path)
|
||||
tty.debug(
|
||||
"Creating SingleFileScope {0} for '{1}'".format(config_name, config_path)
|
||||
)
|
||||
scope = spack.config.SingleFileScope(
|
||||
config_name, config_path, spack.schema.merged.schema
|
||||
)
|
||||
@@ -1024,6 +1073,58 @@ def add(self, user_spec, list_name=user_speclist_name):
|
||||
|
||||
return bool(not existing)
|
||||
|
||||
def change_existing_spec(
|
||||
self,
|
||||
change_spec,
|
||||
list_name=user_speclist_name,
|
||||
match_spec=None,
|
||||
allow_changing_multiple_specs=False,
|
||||
):
|
||||
"""
|
||||
Find the spec identified by `match_spec` and change it to `change_spec`.
|
||||
|
||||
Arguments:
|
||||
change_spec (spack.spec.Spec): defines the spec properties that
|
||||
need to be changed. This will not change attributes of the
|
||||
matched spec unless they conflict with `change_spec`.
|
||||
list_name (str): identifies the spec list in the environment that
|
||||
should be modified
|
||||
match_spec (spack.spec.Spec): if set, this identifies the spec
|
||||
that should be changed. If not set, it is assumed we are
|
||||
looking for a spec with the same name as `change_spec`.
|
||||
"""
|
||||
if not (change_spec.name or (match_spec and match_spec.name)):
|
||||
raise ValueError(
|
||||
"Must specify a spec name to identify a single spec"
|
||||
" in the environment that will be changed"
|
||||
)
|
||||
match_spec = match_spec or Spec(change_spec.name)
|
||||
|
||||
list_to_change = self.spec_lists[list_name]
|
||||
if list_to_change.is_matrix:
|
||||
raise SpackEnvironmentError(
|
||||
"Cannot directly change specs in matrices:"
|
||||
" specify a named list that is not a matrix"
|
||||
)
|
||||
|
||||
matches = list(x for x in list_to_change if x.satisfies(match_spec))
|
||||
if len(matches) == 0:
|
||||
raise ValueError(
|
||||
"There are no specs named {0} in {1}".format(match_spec.name, list_name)
|
||||
)
|
||||
elif len(matches) > 1 and not allow_changing_multiple_specs:
|
||||
raise ValueError("{0} matches multiple specs".format(str(match_spec)))
|
||||
|
||||
new_speclist = SpecList(list_name)
|
||||
for i, spec in enumerate(list_to_change):
|
||||
if spec.satisfies(match_spec):
|
||||
new_speclist.add(Spec.override(spec, change_spec))
|
||||
else:
|
||||
new_speclist.add(spec)
|
||||
|
||||
self.spec_lists[list_name] = new_speclist
|
||||
self.update_stale_references()
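change_existing_spec rewrites a named root in place rather than removing and re-adding it. A hedged usage sketch (the package and variants are only an example):

# Given an environment whose user spec list contains e.g. "hdf5+mpi", this
# flips the variant in place instead of removing and re-adding the entry.
env.change_existing_spec(
    change_spec=Spec("hdf5~mpi"),
    match_spec=Spec("hdf5"),  # optional; defaults to a spec named like change_spec
)
# Raises ValueError if no root named hdf5 exists, or if the match is ambiguous
# and allow_changing_multiple_specs is left at its default of False.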
def remove(self, query_spec, list_name=user_speclist_name, force=False):
|
||||
"""Remove specs from an environment that match a query_spec"""
|
||||
query_spec = Spec(query_spec)
|
||||
@@ -1116,7 +1217,7 @@ def develop(self, spec, path, clone=False):
|
||||
|
||||
if clone:
|
||||
# "steal" the source code via staging API
|
||||
abspath = os.path.normpath(os.path.join(self.path, path))
|
||||
abspath = spack.util.path.canonicalize_path(path, default_wd=self.path)
|
||||
|
||||
# Stage, at the moment, requires a concrete Spec, since it needs the
|
||||
# dag_hash for the stage dir name. Below though we ask for a stage
|
||||
@@ -1694,7 +1795,7 @@ def added_specs(self):
|
||||
spec for already concretized but not yet installed specs.
|
||||
"""
|
||||
# use a transaction to avoid overhead of repeated calls
|
||||
# to `package.installed`
|
||||
# to `package.spec.installed`
|
||||
with spack.store.db.read_transaction():
|
||||
concretized = dict(self.concretized_specs())
|
||||
for spec in self.user_specs:
|
||||
|
||||
@@ -44,7 +44,7 @@ def activate_header(env, shell, prompt=None):
|
||||
# TODO: prompt
|
||||
else:
|
||||
if "color" in os.getenv("TERM", "") and prompt:
|
||||
prompt = colorize("@G{%s}" % prompt, color=True)
|
||||
prompt = colorize("@G{%s}" % prompt, color=True, enclose=True)
|
||||
|
||||
cmds += "export SPACK_ENV=%s;\n" % env.path
|
||||
cmds += "alias despacktivate='spack env deactivate';\n"
|
||||
@@ -65,8 +65,8 @@ def deactivate_header(shell):
|
||||
if shell == "csh":
|
||||
cmds += "unsetenv SPACK_ENV;\n"
|
||||
cmds += "if ( $?SPACK_OLD_PROMPT ) "
|
||||
cmds += 'set prompt="$SPACK_OLD_PROMPT" && '
|
||||
cmds += "unsetenv SPACK_OLD_PROMPT;\n"
|
||||
cmds += ' eval \'set prompt="$SPACK_OLD_PROMPT" &&'
|
||||
cmds += " unsetenv SPACK_OLD_PROMPT';\n"
|
||||
cmds += "unalias despacktivate;\n"
|
||||
elif shell == "fish":
|
||||
cmds += "set -e SPACK_ENV;\n"
|
||||
|
||||
@@ -52,9 +52,9 @@
|
||||
import spack.util.crypto as crypto
|
||||
import spack.util.pattern as pattern
|
||||
import spack.util.url as url_util
|
||||
import spack.util.web
|
||||
import spack.util.web as web_util
|
||||
import spack.version
|
||||
from spack.util.compression import decompressor_for, extension
|
||||
from spack.util.compression import decompressor_for, extension_from_path
|
||||
from spack.util.executable import CommandNotFoundError, which
|
||||
from spack.util.string import comma_and, quote
|
||||
|
||||
@@ -337,7 +337,8 @@ def fetch(self):
|
||||
url = None
|
||||
errors = []
|
||||
for url in self.candidate_urls:
|
||||
if not self._existing_url(url):
|
||||
if not web_util.url_exists(url, self.curl):
|
||||
tty.debug("URL does not exist: " + url)
|
||||
continue
|
||||
|
||||
try:
|
||||
@@ -352,30 +353,6 @@ def fetch(self):
|
||||
if not self.archive_file:
|
||||
raise FailedDownloadError(url)
|
||||
|
||||
def _existing_url(self, url):
|
||||
tty.debug("Checking existence of {0}".format(url))
|
||||
|
||||
if spack.config.get("config:url_fetch_method") == "curl":
|
||||
curl = self.curl
|
||||
# Telling curl to fetch the first byte (-r 0-0) is supposed to be
|
||||
# portable.
|
||||
curl_args = ["--stderr", "-", "-s", "-f", "-r", "0-0", url]
|
||||
if not spack.config.get("config:verify_ssl"):
|
||||
curl_args.append("-k")
|
||||
_ = curl(*curl_args, fail_on_error=False, output=os.devnull)
|
||||
return curl.returncode == 0
|
||||
else:
|
||||
# Telling urllib to check if url is accessible
|
||||
try:
|
||||
url, headers, response = spack.util.web.read_from_url(url)
|
||||
except spack.util.web.SpackWebError as werr:
|
||||
msg = "Urllib fetch failed to verify url\
|
||||
{0}\n with error {1}".format(
|
||||
url, werr
|
||||
)
|
||||
raise FailedDownloadError(url, msg)
|
||||
return response.getcode() is None or response.getcode() == 200
|
||||
|
||||
def _fetch_from_url(self, url):
|
||||
if spack.config.get("config:url_fetch_method") == "curl":
|
||||
return self._fetch_curl(url)
|
||||
@@ -397,8 +374,8 @@ def _fetch_urllib(self, url):
|
||||
|
||||
# Run urllib but grab the mime type from the http headers
|
||||
try:
|
||||
url, headers, response = spack.util.web.read_from_url(url)
|
||||
except spack.util.web.SpackWebError as e:
|
||||
url, headers, response = web_util.read_from_url(url)
|
||||
except web_util.SpackWebError as e:
|
||||
# clean up archive on failure.
|
||||
if self.archive_file:
|
||||
os.remove(self.archive_file)
|
||||
@@ -433,38 +410,19 @@ def _fetch_curl(self, url):
|
||||
else:
|
||||
save_args = ["-O"]
|
||||
|
||||
curl_args = save_args + [
|
||||
"-f", # fail on >400 errors
|
||||
"-D",
|
||||
"-", # print out HTML headers
|
||||
"-L", # resolve 3xx redirects
|
||||
url,
|
||||
]
|
||||
|
||||
if not spack.config.get("config:verify_ssl"):
|
||||
curl_args.append("-k")
|
||||
|
||||
if sys.stdout.isatty() and tty.msg_enabled():
|
||||
curl_args.append("-#") # status bar when using a tty
|
||||
else:
|
||||
curl_args.append("-sS") # show errors if fail
|
||||
|
||||
connect_timeout = spack.config.get("config:connect_timeout", 10)
|
||||
|
||||
timeout = 0
|
||||
cookie_args = []
|
||||
if self.extra_options:
|
||||
cookie = self.extra_options.get("cookie")
|
||||
if cookie:
|
||||
curl_args.append("-j") # junk cookies
|
||||
curl_args.append("-b") # specify cookie
|
||||
curl_args.append(cookie)
|
||||
cookie_args.append("-j") # junk cookies
|
||||
cookie_args.append("-b") # specify cookie
|
||||
cookie_args.append(cookie)
|
||||
|
||||
timeout = self.extra_options.get("timeout")
|
||||
if timeout:
|
||||
connect_timeout = max(connect_timeout, int(timeout))
|
||||
|
||||
if connect_timeout > 0:
|
||||
# Timeout if can't establish a connection after n sec.
|
||||
curl_args.extend(["--connect-timeout", str(connect_timeout)])
|
||||
base_args = web_util.base_curl_fetch_args(url, timeout)
|
||||
curl_args = save_args + base_args + cookie_args
|
||||
|
||||
# Run curl but grab the mime type from the http headers
|
||||
curl = self.curl
|
||||
@@ -479,26 +437,10 @@ def _fetch_curl(self, url):
|
||||
if partial_file and os.path.lexists(partial_file):
|
||||
os.remove(partial_file)
|
||||
|
||||
if curl.returncode == 22:
|
||||
# This is a 404. Curl will print the error.
|
||||
raise FailedDownloadError(url, "URL %s was not found!" % url)
|
||||
|
||||
elif curl.returncode == 60:
|
||||
# This is a certificate error. Suggest spack -k
|
||||
raise FailedDownloadError(
|
||||
url,
|
||||
"Curl was unable to fetch due to invalid certificate. "
|
||||
"This is either an attack, or your cluster's SSL "
|
||||
"configuration is bad. If you believe your SSL "
|
||||
"configuration is bad, you can try running spack -k, "
|
||||
"which will not check SSL certificates."
|
||||
"Use this at your own risk.",
|
||||
)
|
||||
|
||||
else:
|
||||
# This is some other curl error. Curl will print the
|
||||
# error, but print a spack message too
|
||||
raise FailedDownloadError(url, "Curl failed with error %d" % curl.returncode)
|
||||
try:
|
||||
web_util.check_curl_code(curl.returncode)
|
||||
except web_util.FetchError as err:
|
||||
raise spack.fetch_strategy.FailedDownloadError(url, str(err))
|
||||
|
||||
self._check_headers(headers)
|
||||
|
||||
@@ -556,7 +498,7 @@ def archive(self, destination):
|
||||
if not self.archive_file:
|
||||
raise NoArchiveFileError("Cannot call archive() before fetching.")
|
||||
|
||||
spack.util.web.push_to_url(self.archive_file, destination, keep_original=True)
|
||||
web_util.push_to_url(self.archive_file, destination, keep_original=True)
|
||||
|
||||
@_needs_stage
|
||||
def check(self):
|
||||
@@ -671,7 +613,7 @@ def expand(self):
|
||||
|
||||
@_needs_stage
|
||||
def archive(self, destination, **kwargs):
|
||||
assert extension(destination) == "tar.gz"
|
||||
assert extension_from_path(destination) == "tar.gz"
|
||||
assert self.stage.source_path.startswith(self.stage.path)
|
||||
|
||||
tar = which("tar", required=True)
|
||||
@@ -1385,19 +1327,19 @@ def fetch(self):
|
||||
|
||||
parsed_url = url_util.parse(self.url)
|
||||
if parsed_url.scheme != "s3":
|
||||
raise FetchError("S3FetchStrategy can only fetch from s3:// urls.")
|
||||
raise web_util.FetchError("S3FetchStrategy can only fetch from s3:// urls.")
|
||||
|
||||
tty.debug("Fetching {0}".format(self.url))
|
||||
|
||||
basename = os.path.basename(parsed_url.path)
|
||||
|
||||
with working_dir(self.stage.path):
|
||||
_, headers, stream = spack.util.web.read_from_url(self.url)
|
||||
_, headers, stream = web_util.read_from_url(self.url)
|
||||
|
||||
with open(basename, "wb") as f:
|
||||
shutil.copyfileobj(stream, f)
|
||||
|
||||
content_type = spack.util.web.get_header(headers, "Content-type")
|
||||
content_type = web_util.get_header(headers, "Content-type")
|
||||
|
||||
if content_type == "text/html":
|
||||
warn_content_type_mismatch(self.archive_file or "the archive")
|
||||
@@ -1426,15 +1368,13 @@ def __init__(self, *args, **kwargs):
|
||||
|
||||
@_needs_stage
|
||||
def fetch(self):
|
||||
import spack.util.web as web_util
|
||||
|
||||
if self.archive_file:
|
||||
tty.debug("Already downloaded {0}".format(self.archive_file))
|
||||
return
|
||||
|
||||
parsed_url = url_util.parse(self.url)
|
||||
if parsed_url.scheme != "gs":
|
||||
raise FetchError("GCSFetchStrategy can only fetch from gs:// urls.")
|
||||
raise web_util.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")
|
||||
|
||||
tty.debug("Fetching {0}".format(self.url))
|
||||
|
||||
@@ -1489,7 +1429,7 @@ def from_kwargs(**kwargs):
|
||||
on attribute names (e.g., ``git``, ``hg``, etc.)
|
||||
|
||||
Raises:
|
||||
FetchError: If no ``fetch_strategy`` matches the args.
|
||||
spack.util.web.FetchError: If no ``fetch_strategy`` matches the args.
|
||||
"""
|
||||
for fetcher in all_strategies:
|
||||
if fetcher.matches(kwargs):
|
||||
@@ -1586,7 +1526,7 @@ def for_package_version(pkg, version):
|
||||
# if it's a commit, we must use a GitFetchStrategy
|
||||
if isinstance(version, spack.version.GitVersion):
|
||||
if not hasattr(pkg, "git"):
|
||||
raise FetchError(
|
||||
raise web_util.FetchError(
|
||||
"Cannot fetch git version for %s. Package has no 'git' attribute" % pkg.name
|
||||
)
|
||||
# Populate the version with comparisons to other commits
|
||||
@@ -1604,7 +1544,19 @@ def for_package_version(pkg, version):
|
||||
ref_type: version.ref,
|
||||
"no_cache": True,
|
||||
}
|
||||
|
||||
kwargs["submodules"] = getattr(pkg, "submodules", False)
|
||||
|
||||
# if we have a ref_version already, and it is a version from the package
|
||||
# we can use that version's submodule specifications
|
||||
if pkg.version.ref_version:
|
||||
ref_version = spack.version.Version(pkg.version.ref_version[0])
|
||||
ref_version_attributes = pkg.versions.get(ref_version)
|
||||
if ref_version_attributes:
|
||||
kwargs["submodules"] = ref_version_attributes.get(
|
||||
"submodules", kwargs["submodules"]
|
||||
)
|
||||
|
||||
fetcher = GitFetchStrategy(**kwargs)
|
||||
return fetcher
|
||||
|
||||
@@ -1731,15 +1683,11 @@ def destroy(self):
|
||||
shutil.rmtree(self.root, ignore_errors=True)
|
||||
|
||||
|
||||
class FetchError(spack.error.SpackError):
|
||||
"""Superclass for fetcher errors."""
|
||||
|
||||
|
||||
class NoCacheError(FetchError):
|
||||
class NoCacheError(web_util.FetchError):
|
||||
"""Raised when there is no cached archive for a package."""
|
||||
|
||||
|
||||
class FailedDownloadError(FetchError):
|
||||
class FailedDownloadError(web_util.FetchError):
|
||||
"""Raised when a download fails."""
|
||||
|
||||
def __init__(self, url, msg=""):
|
||||
@@ -1747,23 +1695,23 @@ def __init__(self, url, msg=""):
|
||||
self.url = url
|
||||
|
||||
|
||||
class NoArchiveFileError(FetchError):
|
||||
""" "Raised when an archive file is expected but none exists."""
|
||||
class NoArchiveFileError(web_util.FetchError):
|
||||
"""Raised when an archive file is expected but none exists."""
|
||||
|
||||
|
||||
class NoDigestError(FetchError):
|
||||
class NoDigestError(web_util.FetchError):
|
||||
"""Raised after attempt to checksum when URL has no digest."""
|
||||
|
||||
|
||||
class ExtrapolationError(FetchError):
|
||||
class ExtrapolationError(web_util.FetchError):
|
||||
"""Raised when we can't extrapolate a version for a package."""
|
||||
|
||||
|
||||
class FetcherConflict(FetchError):
|
||||
class FetcherConflict(web_util.FetchError):
|
||||
"""Raised for packages with invalid fetch attributes."""
|
||||
|
||||
|
||||
class InvalidArgsError(FetchError):
|
||||
class InvalidArgsError(web_util.FetchError):
|
||||
"""Raised when a version can't be deduced from a set of arguments."""
|
||||
|
||||
def __init__(self, pkg=None, version=None, **args):
|
||||
@@ -1776,11 +1724,11 @@ def __init__(self, pkg=None, version=None, **args):
|
||||
super(InvalidArgsError, self).__init__(msg, long_msg)
|
||||
|
||||
|
||||
class ChecksumError(FetchError):
|
||||
class ChecksumError(web_util.FetchError):
|
||||
"""Raised when archive fails to checksum."""
|
||||
|
||||
|
||||
class NoStageError(FetchError):
|
||||
class NoStageError(web_util.FetchError):
|
||||
"""Raised when fetch operations are called before set_stage()."""
|
||||
|
||||
def __init__(self, method):
|
||||
|
||||
@@ -44,7 +44,7 @@ def __call__(self, spec):
|
||||
|
||||
#: Hash descriptor used only to transfer a DAG, as is, across processes
|
||||
process_hash = SpecHashDescriptor(
|
||||
deptype=("build", "link", "run", "test"), package_hash=False, name="process_hash"
|
||||
deptype=("build", "link", "run", "test"), package_hash=True, name="process_hash"
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@
|
||||
import six
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.error
|
||||
import spack.paths
|
||||
@@ -180,6 +181,9 @@ def __call__(self, *args, **kwargs):
|
||||
if spec.external and not externals:
|
||||
status = "SKIPPED"
|
||||
skipped += 1
|
||||
elif not spec.installed:
|
||||
status = "SKIPPED"
|
||||
skipped += 1
|
||||
else:
|
||||
status = "NO-TESTS"
|
||||
untested += 1
|
||||
@@ -187,6 +191,7 @@ def __call__(self, *args, **kwargs):
|
||||
self.write_test_result(spec, status)
|
||||
except BaseException as exc:
|
||||
self.fails += 1
|
||||
tty.debug("Test failure: {0}".format(str(exc)))
|
||||
if isinstance(exc, (SyntaxError, TestSuiteSpecError)):
|
||||
# Create the test log file and report the error.
|
||||
self.ensure_stage()
|
||||
|
||||
@@ -84,6 +84,9 @@
|
||||
#: queue invariants).
|
||||
STATUS_REMOVED = "removed"
|
||||
|
||||
is_windows = sys.platform == "win32"
|
||||
is_osx = sys.platform == "darwin"
|
||||
|
||||
|
||||
class InstallAction(object):
|
||||
#: Don't perform an install
|
||||
@@ -165,7 +168,9 @@ def _do_fake_install(pkg):
|
||||
if not pkg.name.startswith("lib"):
|
||||
library = "lib" + library
|
||||
|
||||
dso_suffix = ".dylib" if sys.platform == "darwin" else ".so"
|
||||
plat_shared = ".dll" if is_windows else ".so"
|
||||
plat_static = ".lib" if is_windows else ".a"
|
||||
dso_suffix = ".dylib" if is_osx else plat_shared
|
||||
|
||||
# Install fake command
|
||||
fs.mkdirp(pkg.prefix.bin)
|
||||
@@ -180,7 +185,7 @@ def _do_fake_install(pkg):
|
||||
|
||||
# Install fake shared and static libraries
|
||||
fs.mkdirp(pkg.prefix.lib)
|
||||
for suffix in [dso_suffix, ".a"]:
|
||||
for suffix in [dso_suffix, plat_static]:
|
||||
fs.touch(os.path.join(pkg.prefix.lib, library + suffix))
|
||||
|
||||
# Install fake man page
|
||||
@@ -257,6 +262,30 @@ def _hms(seconds):
|
||||
return " ".join(parts)
|
||||
|
||||
|
||||
def _log_prefix(pkg_name):
|
||||
"""Prefix of the form "[pid]: [pkg name]: ..." when printing a status update during
|
||||
the build."""
|
||||
pid = "{0}: ".format(os.getpid()) if tty.show_pid() else ""
|
||||
return "{0}{1}:".format(pid, pkg_name)
|
||||
|
||||
|
||||
def _print_installed_pkg(message):
|
||||
"""
|
||||
Output a message with a package icon.
|
||||
|
||||
Args:
|
||||
message (str): message to be output
|
||||
"""
|
||||
print(colorize("@*g{[+]} ") + spack.util.path.debug_padded_filter(message))
|
||||
|
||||
|
||||
def _print_timer(pre, pkg_id, fetch, build, total):
|
||||
tty.msg(
|
||||
"{0} Successfully installed {1}".format(pre, pkg_id),
|
||||
"Fetch: {0}. Build: {1}. Total: {2}.".format(_hms(fetch), _hms(build), _hms(total)),
|
||||
)
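_log_prefix and _print_timer exist so the binary-cache path below can report the same style of timing line as a source build. A hedged sketch of the composed message (the numbers are invented):

# With a hypothetical timer that recorded search=0.4s, fetch=2.1s, install=1.5s:
fetch = 0.4 + 2.1            # "Fetch" covers mirror search plus tarball download
build = 1.5                  # extraction/registration counts as the build phase
total = fetch + build

_print_timer(
    pre=_log_prefix("zlib"),      # e.g. "12345: zlib:" when PIDs are shown
    pkg_id="zlib-1.2.12-abcdef",  # illustrative package id
    fetch=fetch,
    build=build,
    total=total,
)
# Prints roughly:
#   12345: zlib: Successfully installed zlib-1.2.12-abcdef
#   Fetch: 2.50s. Build: 1.50s. Total: 4.00s.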
def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
|
||||
"""
|
||||
Extract the package from binary cache
|
||||
@@ -273,7 +302,10 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
|
||||
bool: ``True`` if the package was extract from binary cache,
|
||||
``False`` otherwise
|
||||
"""
|
||||
installed_from_cache = _try_install_from_binary_cache(pkg, explicit, unsigned=unsigned)
|
||||
timer = Timer()
|
||||
installed_from_cache = _try_install_from_binary_cache(
|
||||
pkg, explicit, unsigned=unsigned, timer=timer
|
||||
)
|
||||
pkg_id = package_id(pkg)
|
||||
if not installed_from_cache:
|
||||
pre = "No binary for {0} found".format(pkg_id)
|
||||
@@ -282,23 +314,20 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
|
||||
|
||||
tty.msg("{0}: installing from source".format(pre))
|
||||
return False
|
||||
|
||||
timer.stop()
|
||||
tty.debug("Successfully extracted {0} from binary cache".format(pkg_id))
|
||||
_print_timer(
|
||||
pre=_log_prefix(pkg.name),
|
||||
pkg_id=pkg_id,
|
||||
fetch=timer.phases.get("search", 0) + timer.phases.get("fetch", 0),
|
||||
build=timer.phases.get("install", 0),
|
||||
total=timer.total,
|
||||
)
|
||||
_print_installed_pkg(pkg.spec.prefix)
|
||||
spack.hooks.post_install(pkg.spec)
|
||||
return True
|
||||
|
||||
|
||||
def _print_installed_pkg(message):
|
||||
"""
|
||||
Output a message with a package icon.
|
||||
|
||||
Args:
|
||||
message (str): message to be output
|
||||
"""
|
||||
print(colorize("@*g{[+]} ") + spack.util.path.debug_padded_filter(message))
|
||||
|
||||
|
||||
def _process_external_package(pkg, explicit):
|
||||
"""
|
||||
Helper function to run post install hooks and register external packages.
|
||||
@@ -340,7 +369,9 @@ def _process_external_package(pkg, explicit):
|
||||
spack.store.db.add(spec, None, explicit=explicit)
|
||||
|
||||
|
||||
def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned, mirrors_for_spec=None):
|
||||
def _process_binary_cache_tarball(
|
||||
pkg, binary_spec, explicit, unsigned, mirrors_for_spec=None, timer=None
|
||||
):
|
||||
"""
|
||||
Process the binary cache tarball.
|
||||
|
||||
@@ -352,6 +383,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned, mirrors_
|
||||
otherwise, ``False``
|
||||
mirrors_for_spec (list): Optional list of concrete specs and mirrors
|
||||
obtained by calling binary_distribution.get_mirrors_for_spec().
|
||||
timer (Timer): timer to keep track of binary install phases.
|
||||
|
||||
Return:
|
||||
bool: ``True`` if the package was extracted from binary cache,
|
||||
@@ -360,6 +392,8 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned, mirrors_
|
||||
download_result = binary_distribution.download_tarball(
|
||||
binary_spec, unsigned, mirrors_for_spec=mirrors_for_spec
|
||||
)
|
||||
if timer:
|
||||
timer.phase("fetch")
|
||||
# see #10063 : install from source if tarball doesn't exist
|
||||
if download_result is None:
|
||||
tty.msg("{0} exists in binary cache but with different hash".format(pkg.name))
|
||||
@@ -376,10 +410,12 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned, mirrors_
|
||||
|
||||
pkg.installed_from_binary_cache = True
|
||||
spack.store.db.add(pkg.spec, spack.store.layout, explicit=explicit)
|
||||
if timer:
|
||||
timer.phase("install")
|
||||
return True
|
||||
|
||||
|
||||
def _try_install_from_binary_cache(pkg, explicit, unsigned=False):
def _try_install_from_binary_cache(pkg, explicit, unsigned=False, timer=None):
    """
    Try to extract the package from binary cache.

@@ -388,16 +424,20 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False):
        explicit (bool): the package was explicitly requested by the user
        unsigned (bool): ``True`` if binary package signatures are to be checked,
            otherwise ``False``
        timer (Timer): timer to keep track of binary install phases
    """
    pkg_id = package_id(pkg)
    tty.debug("Searching for binary cache of {0}".format(pkg_id))
    matches = binary_distribution.get_mirrors_for_spec(pkg.spec)

    if timer:
        timer.phase("search")

    if not matches:
        return False

    return _process_binary_cache_tarball(
        pkg, pkg.spec, explicit, unsigned, mirrors_for_spec=matches
        pkg, pkg.spec, explicit, unsigned, mirrors_for_spec=matches, timer=timer
    )
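The optional timer threaded through the two functions above marks the mirror lookup ("search"), the tarball download ("fetch"), and the extraction ("install"); _install_from_cache then folds "search" and "fetch" into the fetch column and "install" into the build column printed by _print_timer. A minimal, self-contained sketch of that accumulation pattern (PhaseTimer and its methods are illustrative, not Spack's spack.util.timer.Timer API):

import time


class PhaseTimer(object):
    """Toy timer: attributes the time elapsed since the last mark to a named phase."""

    def __init__(self):
        self.start = time.time()
        self.last = self.start
        self.phases = {}
        self.total = 0.0

    def phase(self, name):
        # close the interval since the previous mark and charge it to `name`
        now = time.time()
        self.phases[name] = self.phases.get(name, 0.0) + (now - self.last)
        self.last = now

    def stop(self):
        self.total = time.time() - self.start


timer = PhaseTimer()
# ... look up mirrors ...
timer.phase("search")
# ... download the tarball ...
timer.phase("fetch")
# ... extract and register the package ...
timer.phase("install")
timer.stop()

fetch_time = timer.phases.get("search", 0) + timer.phases.get("fetch", 0)
build_time = timer.phases.get("install", 0)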
|
||||
|
||||
|
||||
@@ -820,7 +860,7 @@ def _check_deps_status(self, request):
|
||||
if spack.store.db.prefix_failed(dep):
|
||||
action = "'spack install' the dependency"
|
||||
msg = "{0} is marked as an install failure: {1}".format(dep_id, action)
|
||||
raise InstallError(err.format(request.pkg_id, msg))
|
||||
raise InstallError(err.format(request.pkg_id, msg), pkg=dep_pkg)
|
||||
|
||||
# Attempt to get a read lock to ensure another process does not
|
||||
# uninstall the dependency while the requested spec is being
|
||||
@@ -828,7 +868,7 @@ def _check_deps_status(self, request):
|
||||
ltype, lock = self._ensure_locked("read", dep_pkg)
|
||||
if lock is None:
|
||||
msg = "{0} is write locked by another process".format(dep_id)
|
||||
raise InstallError(err.format(request.pkg_id, msg))
|
||||
raise InstallError(err.format(request.pkg_id, msg), pkg=request.pkg)
|
||||
|
||||
# Flag external and upstream packages as being installed
|
||||
if dep_pkg.spec.external or dep_pkg.spec.installed_upstream:
|
||||
@@ -883,6 +923,7 @@ def _prepare_for_install(self, task):
|
||||
"Install prefix collision for {0}".format(task.pkg_id),
|
||||
long_msg="Prefix directory {0} already used by another "
|
||||
"installed spec.".format(task.pkg.spec.prefix),
|
||||
pkg=task.pkg,
|
||||
)
|
||||
|
||||
# Make sure the installation directory is in the desired state
|
||||
@@ -1176,12 +1217,12 @@ def _install_task(self, task):
|
||||
Args:
|
||||
task (BuildTask): the installation build task for a package"""
|
||||
|
||||
install_args = task.request.install_args
|
||||
cache_only = install_args.get("cache_only")
|
||||
explicit = task.explicit
|
||||
install_args = task.request.install_args
|
||||
cache_only = task.cache_only
|
||||
use_cache = task.use_cache
|
||||
tests = install_args.get("tests")
|
||||
unsigned = install_args.get("unsigned")
|
||||
use_cache = install_args.get("use_cache")
|
||||
|
||||
pkg, pkg_id = task.pkg, task.pkg_id
|
||||
|
||||
@@ -1213,7 +1254,10 @@ def _install_task(self, task):
|
||||
spack.package_base.PackageBase._verbose = spack.build_environment.start_build_process(
|
||||
pkg, build_process, install_args
|
||||
)
|
||||
|
||||
# Currently this is how RPATH-like behavior is achieved on Windows, after install
|
||||
# establish runtime linkage via Windows Runtime link object
|
||||
# Note: this is a no-op on non Windows platforms
|
||||
pkg.windows_establish_runtime_linkage()
|
||||
# Note: PARENT of the build process adds the new package to
|
||||
# the database, so that we don't need to re-read from file.
|
||||
spack.store.db.add(pkg.spec, spack.store.layout, explicit=explicit)
|
||||
@@ -1571,7 +1615,8 @@ def install(self):
|
||||
raise InstallError(
|
||||
"Cannot proceed with {0}: {1} uninstalled {2}: {3}".format(
|
||||
pkg_id, task.priority, dep_str, ",".join(task.uninstalled_deps)
|
||||
)
|
||||
),
|
||||
pkg=pkg,
|
||||
)
|
||||
|
||||
# Skip the installation if the spec is not being installed locally
|
||||
@@ -1596,7 +1641,7 @@ def install(self):
|
||||
spack.hooks.on_install_failure(task.request.pkg.spec)
|
||||
|
||||
if self.fail_fast:
|
||||
raise InstallError(fail_fast_err)
|
||||
raise InstallError(fail_fast_err, pkg=pkg)
|
||||
|
||||
continue
|
||||
|
||||
@@ -1718,7 +1763,7 @@ def install(self):
|
||||
)
|
||||
# Terminate if requested to do so on the first failure.
|
||||
if self.fail_fast:
|
||||
raise InstallError("{0}: {1}".format(fail_fast_err, str(exc)))
|
||||
raise InstallError("{0}: {1}".format(fail_fast_err, str(exc)), pkg=pkg)
|
||||
|
||||
# Terminate at this point if the single explicit spec has
|
||||
# failed to install.
|
||||
@@ -1727,7 +1772,7 @@ def install(self):
|
||||
|
||||
# Track explicit spec id and error to summarize when done
|
||||
if task.explicit:
|
||||
failed_explicits.append((pkg_id, str(exc)))
|
||||
failed_explicits.append((pkg, pkg_id, str(exc)))
|
||||
|
||||
finally:
|
||||
# Remove the install prefix if anything went wrong during
|
||||
@@ -1750,19 +1795,38 @@ def install(self):
|
||||
# Ensure we properly report if one or more explicit specs failed
|
||||
# or were not installed when should have been.
|
||||
missing = [
|
||||
request.pkg_id
|
||||
(request.pkg, request.pkg_id)
|
||||
for request in self.build_requests
|
||||
if request.install_args.get("install_package") and request.pkg_id not in self.installed
|
||||
]
|
||||
|
||||
if failed_explicits or missing:
|
||||
for pkg_id, err in failed_explicits:
|
||||
for _, pkg_id, err in failed_explicits:
|
||||
tty.error("{0}: {1}".format(pkg_id, err))
|
||||
|
||||
for pkg_id in missing:
|
||||
for _, pkg_id in missing:
|
||||
tty.error("{0}: Package was not installed".format(pkg_id))
|
||||
|
||||
pkg = None
|
||||
if len(failed_explicits) > 0:
|
||||
pkg = failed_explicits[0][0]
|
||||
ids = [pkg_id for _, pkg_id, _ in failed_explicits]
|
||||
tty.debug(
|
||||
"Associating installation failure with first failed "
|
||||
"explicit package ({0}) from {1}".format(ids[0], ", ".join(ids))
|
||||
)
|
||||
|
||||
if not pkg and len(missing) > 0:
|
||||
pkg = missing[0][0]
|
||||
ids = [pkg_id for _, pkg_id in missing]
|
||||
tty.debug(
|
||||
"Associating installation failure with first "
|
||||
"missing package ({0}) from {1}".format(ids[0], ", ".join(ids))
|
||||
)
|
||||
|
||||
raise InstallError(
|
||||
"Installation request failed. Refer to " "reported errors for failing package(s)."
|
||||
"Installation request failed. Refer to reported errors for failing package(s).",
|
||||
pkg=pkg,
|
||||
)
|
||||
|
||||
|
||||
@@ -1812,8 +1876,7 @@ def __init__(self, pkg, install_args):
|
||||
self.filter_fn = spack.util.path.padding_filter if padding else None
|
||||
|
||||
# info/debug information
|
||||
pid = "{0}: ".format(os.getpid()) if tty.show_pid() else ""
|
||||
self.pre = "{0}{1}:".format(pid, pkg.name)
|
||||
self.pre = _log_prefix(pkg.name)
|
||||
self.pkg_id = package_id(pkg)
|
||||
|
||||
def run(self):
|
||||
@@ -1856,12 +1919,12 @@ def run(self):
|
||||
# Run post install hooks before build stage is removed.
|
||||
spack.hooks.post_install(self.pkg.spec)
|
||||
|
||||
build_time = self.timer.total - self.pkg._fetch_time
|
||||
tty.msg(
|
||||
"{0} Successfully installed {1}".format(self.pre, self.pkg_id),
|
||||
"Fetch: {0}. Build: {1}. Total: {2}.".format(
|
||||
_hms(self.pkg._fetch_time), _hms(build_time), _hms(self.timer.total)
|
||||
),
|
||||
_print_timer(
|
||||
pre=self.pre,
|
||||
pkg_id=self.pkg_id,
|
||||
fetch=self.pkg._fetch_time,
|
||||
build=self.timer.total - self.pkg._fetch_time,
|
||||
total=self.timer.total,
|
||||
)
|
||||
_print_installed_pkg(self.pkg.prefix)
|
||||
|
||||
@@ -2060,7 +2123,7 @@ def __init__(self, pkg, request, compiler, start, attempts, status, installed):
|
||||
# queue.
|
||||
if status == STATUS_REMOVED:
|
||||
msg = "Cannot create a build task for {0} with status '{1}'"
|
||||
raise InstallError(msg.format(self.pkg_id, status))
|
||||
raise InstallError(msg.format(self.pkg_id, status), pkg=pkg)
|
||||
|
||||
self.status = status
|
||||
|
||||
@@ -2191,7 +2254,29 @@ def flag_installed(self, installed):
|
||||
    @property
    def explicit(self):
        """The package was explicitly requested by the user."""
        return self.pkg == self.request.pkg and self.request.install_args.get("explicit", True)
        return self.is_root and self.request.install_args.get("explicit", True)

    @property
    def is_root(self):
        """The package was requested directly, but may or may not be explicit
        in an environment."""
        return self.pkg == self.request.pkg

    @property
    def use_cache(self):
        _use_cache = True
        if self.is_root:
            return self.request.install_args.get("package_use_cache", _use_cache)
        else:
            return self.request.install_args.get("dependencies_use_cache", _use_cache)

    @property
    def cache_only(self):
        _cache_only = False
        if self.is_root:
            return self.request.install_args.get("package_cache_only", _cache_only)
        else:
            return self.request.install_args.get("dependencies_cache_only", _cache_only)
|
||||
|
||||
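The two properties above let one install request carry different binary-cache settings for the requested package and for its dependencies. A short illustrative sketch of how the per-task values resolve from install_args (the helper name and example dict are hypothetical, but the keys and defaults match the properties above):

def resolve_cache_settings(install_args, is_root):
    """Pick the cache settings that apply to a root task vs. a dependency task."""
    prefix = "package" if is_root else "dependencies"
    return (
        install_args.get("{0}_use_cache".format(prefix), True),
        install_args.get("{0}_cache_only".format(prefix), False),
    )


install_args = {
    "package_use_cache": True,
    "package_cache_only": False,
    "dependencies_use_cache": True,
    "dependencies_cache_only": True,  # force dependencies to come from a build cache
}

print(resolve_cache_settings(install_args, is_root=True))   # (True, False)
print(resolve_cache_settings(install_args, is_root=False))  # (True, True)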
@property
|
||||
def key(self):
|
||||
@@ -2273,21 +2358,23 @@ def __str__(self):
|
||||
def _add_default_args(self):
|
||||
"""Ensure standard install options are set to at least the default."""
|
||||
for arg, default in [
|
||||
("cache_only", False),
|
||||
("context", "build"), # installs *always* build
|
||||
("dependencies_cache_only", False),
|
||||
("dependencies_use_cache", True),
|
||||
("dirty", False),
|
||||
("fail_fast", False),
|
||||
("fake", False),
|
||||
("install_deps", True),
|
||||
("install_package", True),
|
||||
("install_source", False),
|
||||
("package_cache_only", False),
|
||||
("package_use_cache", True),
|
||||
("keep_prefix", False),
|
||||
("keep_stage", False),
|
||||
("restage", False),
|
||||
("skip_patch", False),
|
||||
("tests", False),
|
||||
("unsigned", False),
|
||||
("use_cache", True),
|
||||
("verbose", False),
|
||||
]:
|
||||
_ = self.install_args.setdefault(arg, default)
|
||||
@@ -2304,7 +2391,13 @@ def get_deptypes(self, pkg):
|
||||
"""
|
||||
deptypes = ["link", "run"]
|
||||
include_build_deps = self.install_args.get("include_build_deps")
|
||||
if not self.install_args.get("cache_only") or include_build_deps:
|
||||
|
||||
if self.pkg_id == package_id(pkg):
|
||||
cache_only = self.install_args.get("package_cache_only")
|
||||
else:
|
||||
cache_only = self.install_args.get("dependencies_cache_only")
|
||||
|
||||
if not cache_only or include_build_deps:
|
||||
deptypes.append("build")
|
||||
if self.run_tests(pkg):
|
||||
deptypes.append("test")
|
||||
@@ -2333,28 +2426,43 @@ def spec(self):
|
||||
"""The specification associated with the package."""
|
||||
return self.pkg.spec
|
||||
|
||||
    def traverse_dependencies(self):
    def traverse_dependencies(self, spec=None, visited=None):
        """
        Yield any dependencies of the appropriate type(s)

        Yields:
            (Spec) The next child spec in the DAG
        """
        get_spec = lambda s: s.spec
        # notice: deptype is not constant across nodes, so we cannot use
        # spec.traverse_edges(deptype=...).

        deptypes = self.get_deptypes(self.pkg)
        tty.debug("Processing dependencies for {0}: {1}".format(self.pkg_id, deptypes))
        for dspec in self.spec.traverse_edges(
            deptype=deptypes, order="post", root=False, direction="children"
        ):
            yield get_spec(dspec)
        if spec is None:
            spec = self.spec
        if visited is None:
            visited = set()
        deptype = self.get_deptypes(spec.package)

        for dep in spec.dependencies(deptype=deptype):
            hash = dep.dag_hash()
            if hash in visited:
                continue
            visited.add(hash)
            # In Python 3: yield from self.traverse_dependencies(dep, visited)
            for s in self.traverse_dependencies(dep, visited):
                yield s
            yield dep
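The rewritten generator above walks the dependency DAG manually, deduplicating nodes by DAG hash so each dependency is yielded only once even when it is reachable through several parents, and yielding children before the nodes that depend on them; the root itself is not yielded. A generic, self-contained sketch of the same pattern on a plain dict graph (Python 3 syntax, purely illustrative):

def traverse(graph, node, visited=None):
    """Post-order traversal of a DAG given as {node: [children]}; each node yielded once."""
    if visited is None:
        visited = set()
    for child in graph.get(node, []):
        if child in visited:
            continue
        visited.add(child)
        yield from traverse(graph, child, visited)
        yield child


graph = {"root": ["a", "b"], "a": ["c"], "b": ["c"], "c": []}
print(list(traverse(graph, "root")))  # ['c', 'a', 'b'] -- the shared child 'c' appears once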
|
||||
|
||||
|
||||
class InstallError(spack.error.SpackError):
    """Raised when something goes wrong during install or uninstall."""
    """Raised when something goes wrong during install or uninstall.

    def __init__(self, message, long_msg=None):
    The error can be annotated with a ``pkg`` attribute to allow the
    caller to get the package for which the exception was raised.
    """

    def __init__(self, message, long_msg=None, pkg=None):
        super(InstallError, self).__init__(message, long_msg)
        self.pkg = pkg
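With the ``pkg`` annotation added above, callers can recover which package an installation failure belongs to. A brief hedged sketch of how a caller might use it (the ``builder`` object and the reporting code are illustrative, not taken from Spack):

try:
    builder.install()
except InstallError as err:
    # err.pkg is None unless the raiser attached the failing package
    name = err.pkg.name if err.pkg is not None else "<unknown package>"
    tty.error("Installation of {0} failed: {1}".format(name, err))
    raise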
|
||||
|
||||
|
||||
class BadInstallPhase(InstallError):
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
import pstats
|
||||
import re
|
||||
import signal
|
||||
import subprocess as sp
|
||||
import sys
|
||||
import traceback
|
||||
import warnings
|
||||
@@ -546,6 +547,12 @@ def setup_main_options(args):
|
||||
# Assign a custom function to show warnings
|
||||
warnings.showwarning = send_warning_to_tty
|
||||
|
||||
if sys.version_info[:2] == (2, 7):
|
||||
warnings.warn(
|
||||
"Python 2.7 support is deprecated and will be removed in Spack v0.20.\n"
|
||||
" Please move to Python 3.6 or higher."
|
||||
)
|
||||
|
||||
# Set up environment based on args.
|
||||
tty.set_verbose(args.verbose)
|
||||
tty.set_debug(args.debug)
|
||||
@@ -570,7 +577,14 @@ def setup_main_options(args):
|
||||
spack.config.set("config:locks", args.locks, scope="command_line")
|
||||
|
||||
if args.mock:
|
||||
spack.repo.path = spack.repo.RepoPath(spack.paths.mock_packages_path)
|
||||
import spack.util.spack_yaml as syaml
|
||||
|
||||
key = syaml.syaml_str("repos")
|
||||
key.override = True
|
||||
spack.config.config.scopes["command_line"].sections["repos"] = syaml.syaml_dict(
|
||||
[(key, [spack.paths.mock_packages_path])]
|
||||
)
|
||||
spack.repo.path = spack.repo.create(spack.config.config)
|
||||
|
||||
# If the user asked for it, don't check ssl certs.
|
||||
if args.insecure:
|
||||
@@ -623,15 +637,19 @@ class SpackCommand(object):
|
||||
their output.
|
||||
"""
|
||||
|
||||
    def __init__(self, command_name):
    def __init__(self, command_name, subprocess=False):
        """Create a new SpackCommand that invokes ``command_name`` when called.

        Args:
            command_name (str): name of the command to invoke
            subprocess (bool): whether to fork a subprocess or not. Currently not supported on
                Windows, where it is always False.
        """
        self.parser = make_argument_parser()
        self.command = self.parser.add_command(command_name)
        self.command_name = command_name
        # TODO: figure out how to support this on windows
        self.subprocess = subprocess if sys.platform != "win32" else False
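SpackCommand is mostly used from the test suite; a hedged usage sketch of the new flag (the command and arguments below are only an example):

from spack.main import SpackCommand

# In-process invocation (default), as before:
config = SpackCommand("config")
output = config("get", "config")

# Fork `spack <command>` as a separate process instead; useful when a test
# must not mutate global state in the current interpreter:
config_subprocess = SpackCommand("config", subprocess=True)
output = config_subprocess("get", "config")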
|
||||
|
||||
def __call__(self, *argv, **kwargs):
|
||||
"""Invoke this SpackCommand.
|
||||
@@ -656,25 +674,36 @@ def __call__(self, *argv, **kwargs):
|
||||
self.error = None
|
||||
|
||||
prepend = kwargs["global_args"] if "global_args" in kwargs else []
|
||||
|
||||
args, unknown = self.parser.parse_known_args(prepend + [self.command_name] + list(argv))
|
||||
|
||||
fail_on_error = kwargs.get("fail_on_error", True)
|
||||
|
||||
out = StringIO()
|
||||
try:
|
||||
with log_output(out):
|
||||
self.returncode = _invoke_command(self.command, self.parser, args, unknown)
|
||||
if self.subprocess:
|
||||
p = sp.Popen(
|
||||
[spack.paths.spack_script, self.command_name] + prepend + list(argv),
|
||||
stdout=sp.PIPE,
|
||||
stderr=sp.STDOUT,
|
||||
)
|
||||
out, self.returncode = p.communicate()
|
||||
out = out.decode()
|
||||
else:
|
||||
args, unknown = self.parser.parse_known_args(
|
||||
prepend + [self.command_name] + list(argv)
|
||||
)
|
||||
|
||||
except SystemExit as e:
|
||||
self.returncode = e.code
|
||||
out = StringIO()
|
||||
try:
|
||||
with log_output(out):
|
||||
self.returncode = _invoke_command(self.command, self.parser, args, unknown)
|
||||
|
||||
except BaseException as e:
|
||||
tty.debug(e)
|
||||
self.error = e
|
||||
if fail_on_error:
|
||||
self._log_command_output(out)
|
||||
raise
|
||||
except SystemExit as e:
|
||||
self.returncode = e.code
|
||||
|
||||
except BaseException as e:
|
||||
tty.debug(e)
|
||||
self.error = e
|
||||
if fail_on_error:
|
||||
self._log_command_output(out)
|
||||
raise
|
||||
out = out.getvalue()
|
||||
|
||||
if fail_on_error and self.returncode not in (None, 0):
|
||||
self._log_command_output(out)
|
||||
@@ -683,7 +712,7 @@ def __call__(self, *argv, **kwargs):
|
||||
% (self.returncode, self.command_name, ", ".join("'%s'" % a for a in argv))
|
||||
)
|
||||
|
||||
return out.getvalue()
|
||||
return out
|
||||
|
||||
def _log_command_output(self, out):
|
||||
if tty.is_verbose():
|
||||
|
||||
@@ -63,6 +63,7 @@
|
||||
from spack.util.executable import ProcessError, which
|
||||
from spack.util.package_hash import package_hash
|
||||
from spack.util.prefix import Prefix
|
||||
from spack.util.web import FetchError
|
||||
from spack.version import GitVersion, Version, VersionBase
|
||||
|
||||
if sys.version_info[0] >= 3:
|
||||
@@ -96,6 +97,9 @@
|
||||
_spack_configure_argsfile = "spack-configure-args.txt"
|
||||
|
||||
|
||||
is_windows = sys.platform == "win32"
|
||||
|
||||
|
||||
def preferred_version(pkg):
|
||||
"""
|
||||
Returns a sorted list of the preferred versions of the package.
|
||||
@@ -181,6 +185,30 @@ def copy(self):
|
||||
return other
|
||||
|
||||
|
||||
class WindowsRPathMeta(object):
|
||||
"""Collection of functionality surrounding Windows RPATH specific features
|
||||
|
||||
This is essentially meaningless for all other platforms
|
||||
due to their use of RPATH. All methods within this class are no-ops on
|
||||
non Windows. Packages can customize and manipulate this class as
|
||||
they would a genuine RPATH, i.e. adding directories that contain
|
||||
runtime library dependencies"""
|
||||
|
||||
def add_search_paths(self, *path):
|
||||
"""Add additional rpaths that are not implicitly included in the search
|
||||
scheme
|
||||
"""
|
||||
self.win_rpath.include_additional_link_paths(*path)
|
||||
|
||||
def windows_establish_runtime_linkage(self):
|
||||
"""Establish RPATH on Windows
|
||||
|
||||
Performs symlinking to incorporate rpath dependencies to Windows runtime search paths
|
||||
"""
|
||||
if is_windows:
|
||||
self.win_rpath.establish_link()
|
||||
|
||||
|
||||
#: Registers which are the detectable packages, by repo and package name
|
||||
#: Need a pass of package repositories to be filled.
|
||||
detectable_packages = collections.defaultdict(list)
|
||||
@@ -220,7 +248,7 @@ def to_windows_exe(exe):
|
||||
plat_exe = []
|
||||
if hasattr(cls, "executables"):
|
||||
for exe in cls.executables:
|
||||
if sys.platform == "win32":
|
||||
if is_windows:
|
||||
exe = to_windows_exe(exe)
|
||||
plat_exe.append(exe)
|
||||
return plat_exe
|
||||
@@ -512,7 +540,7 @@ def test_log_pathname(test_stage, spec):
|
||||
return os.path.join(test_stage, "test-{0}-out.txt".format(TestSuite.test_pkg_id(spec)))
|
||||
|
||||
|
||||
class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
|
||||
class PackageBase(six.with_metaclass(PackageMeta, WindowsRPathMeta, PackageViewMixin, object)):
|
||||
"""This is the superclass for all spack packages.
|
||||
|
||||
***The Package class***
|
||||
@@ -752,6 +780,8 @@ def __init__(self, spec):
|
||||
# Set up timing variables
|
||||
self._fetch_time = 0.0
|
||||
|
||||
self.win_rpath = fsys.WindowsSimulatedRPath(self)
|
||||
|
||||
if self.is_extension:
|
||||
pkg_cls = spack.repo.path.get_pkg_class(self.extendee_spec.name)
|
||||
pkg_cls(self.extendee_spec)._check_extendable()
|
||||
@@ -1739,6 +1769,10 @@ def content_hash(self, content=None):
|
||||
|
||||
return b32_hash
|
||||
|
||||
@property
|
||||
def cmake_prefix_paths(self):
|
||||
return [self.prefix]
|
||||
|
||||
def _has_make_target(self, target):
|
||||
"""Checks to see if 'target' is a valid target in a Makefile.
|
||||
|
||||
@@ -2749,6 +2783,8 @@ def rpath(self):
|
||||
deps = self.spec.dependencies(deptype="link")
|
||||
rpaths.extend(d.prefix.lib for d in deps if os.path.isdir(d.prefix.lib))
|
||||
rpaths.extend(d.prefix.lib64 for d in deps if os.path.isdir(d.prefix.lib64))
|
||||
if is_windows:
|
||||
rpaths.extend(d.prefix.bin for d in deps if os.path.isdir(d.prefix.bin))
|
||||
return rpaths
|
||||
|
||||
@property
|
||||
@@ -2840,6 +2876,10 @@ def test_process(pkg, kwargs):
|
||||
print_test_message(logger, "Skipped tests for external package", verbose)
|
||||
return
|
||||
|
||||
if not pkg.spec.installed:
|
||||
print_test_message(logger, "Skipped not installed package", verbose)
|
||||
return
|
||||
|
||||
# run test methods from the package and all virtuals it
|
||||
# provides virtuals have to be deduped by name
|
||||
v_names = list(set([vspec.name for vspec in pkg.virtuals_provided]))
|
||||
@@ -2910,6 +2950,9 @@ def test_process(pkg, kwargs):
|
||||
# non-pass-only methods
|
||||
if ran_actual_test_function:
|
||||
fsys.touch(pkg.tested_file)
|
||||
# log one more test message to provide a completion timestamp
|
||||
# for CDash reporting
|
||||
tty.msg("Completed testing")
|
||||
else:
|
||||
print_test_message(logger, "No tests to run", verbose)
|
||||
|
||||
@@ -3015,13 +3058,6 @@ def possible_dependencies(*pkg_or_spec, **kwargs):
|
||||
return visited
|
||||
|
||||
|
||||
class FetchError(spack.error.SpackError):
|
||||
"""Raised when something goes wrong during fetch."""
|
||||
|
||||
def __init__(self, message, long_msg=None):
|
||||
super(FetchError, self).__init__(message, long_msg)
|
||||
|
||||
|
||||
class PackageStillNeededError(InstallError):
|
||||
"""Raised when package is still needed by another on uninstall."""
|
||||
|
||||
|
||||
@@ -78,7 +78,9 @@ def lex_word(self, word):
|
||||
break
|
||||
|
||||
if remainder and not remainder_used:
|
||||
raise LexError("Invalid character", word, word.index(remainder))
|
||||
msg = "Invalid character, '{0}',".format(remainder[0])
|
||||
msg += " in '{0}' at index {1}".format(word, word.index(remainder))
|
||||
raise LexError(msg, word, word.index(remainder))
|
||||
|
||||
return tokens
|
||||
|
||||
|
||||
@@ -271,12 +271,13 @@ def to_dict(self):
|
||||
return data
|
||||
|
||||
|
||||
def from_dict(dictionary):
|
||||
def from_dict(dictionary, repository=None):
|
||||
"""Create a patch from json dictionary."""
|
||||
repository = repository or spack.repo.path
|
||||
owner = dictionary.get("owner")
|
||||
if "owner" not in dictionary:
|
||||
raise ValueError("Invalid patch dictionary: %s" % dictionary)
|
||||
pkg_cls = spack.repo.path.get_pkg_class(owner)
|
||||
pkg_cls = repository.get_pkg_class(owner)
|
||||
|
||||
if "url" in dictionary:
|
||||
return UrlPatch(
|
||||
@@ -329,7 +330,7 @@ class PatchCache(object):
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, data=None):
|
||||
def __init__(self, repository, data=None):
|
||||
if data is None:
|
||||
self.index = {}
|
||||
else:
|
||||
@@ -337,9 +338,11 @@ def __init__(self, data=None):
|
||||
raise IndexError("invalid patch index; try `spack clean -m`")
|
||||
self.index = data["patches"]
|
||||
|
||||
self.repository = repository
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, stream):
|
||||
return PatchCache(sjson.load(stream))
|
||||
def from_json(cls, stream, repository):
|
||||
return PatchCache(repository=repository, data=sjson.load(stream))
|
||||
|
||||
def to_json(self, stream):
|
||||
sjson.dump({"patches": self.index}, stream)
|
||||
@@ -375,7 +378,7 @@ def patch_for_package(self, sha256, pkg):
|
||||
# because it's the index key)
|
||||
patch_dict = dict(patch_dict)
|
||||
patch_dict["sha256"] = sha256
|
||||
return from_dict(patch_dict)
|
||||
return from_dict(patch_dict, repository=self.repository)
|
||||
|
||||
def update_package(self, pkg_fullname):
|
||||
# remove this package from any patch entries that reference it.
|
||||
@@ -397,8 +400,8 @@ def update_package(self, pkg_fullname):
|
||||
del self.index[sha256]
|
||||
|
||||
# update the index with per-package patch indexes
|
||||
pkg_cls = spack.repo.path.get_pkg_class(pkg_fullname)
|
||||
partial_index = self._index_patches(pkg_cls)
|
||||
pkg_cls = self.repository.get_pkg_class(pkg_fullname)
|
||||
partial_index = self._index_patches(pkg_cls, self.repository)
|
||||
for sha256, package_to_patch in partial_index.items():
|
||||
p2p = self.index.setdefault(sha256, {})
|
||||
p2p.update(package_to_patch)
|
||||
@@ -410,7 +413,7 @@ def update(self, other):
|
||||
p2p.update(package_to_patch)
|
||||
|
||||
@staticmethod
|
||||
def _index_patches(pkg_class):
|
||||
def _index_patches(pkg_class, repository):
|
||||
index = {}
|
||||
|
||||
# Add patches from the class
|
||||
@@ -425,7 +428,7 @@ def _index_patches(pkg_class):
|
||||
for cond, dependency in conditions.items():
|
||||
for pcond, patch_list in dependency.patches.items():
|
||||
for patch in patch_list:
|
||||
dspec_cls = spack.repo.path.get_pkg_class(dependency.spec.name)
|
||||
dspec_cls = repository.get_pkg_class(dependency.spec.name)
|
||||
patch_dict = patch.to_dict()
|
||||
patch_dict.pop("sha256") # save some space
|
||||
index[patch.sha256] = {dspec_cls.fullname: patch_dict}
|
||||
|
||||
@@ -129,7 +129,7 @@ def __repr__(self):
|
||||
|
||||
|
||||
class ProviderIndex(_IndexBase):
|
||||
def __init__(self, specs=None, restrict=False):
|
||||
def __init__(self, repository, specs=None, restrict=False):
|
||||
"""Provider index based on a single mapping of providers.
|
||||
|
||||
Args:
|
||||
@@ -143,17 +143,16 @@ def __init__(self, specs=None, restrict=False):
|
||||
TODO: as possible without overly restricting results, so it is
|
||||
TODO: not the best name.
|
||||
"""
|
||||
if specs is None:
|
||||
specs = []
|
||||
|
||||
self.repository = repository
|
||||
self.restrict = restrict
|
||||
self.providers = {}
|
||||
|
||||
specs = specs or []
|
||||
for spec in specs:
|
||||
if not isinstance(spec, spack.spec.Spec):
|
||||
spec = spack.spec.Spec(spec)
|
||||
|
||||
if spec.virtual:
|
||||
if self.repository.is_virtual_safe(spec.name):
|
||||
continue
|
||||
|
||||
self.update(spec)
|
||||
@@ -171,9 +170,10 @@ def update(self, spec):
|
||||
# Empty specs do not have a package
|
||||
return
|
||||
|
||||
assert not spec.virtual, "cannot update an index using a virtual spec"
|
||||
msg = "cannot update an index passing the virtual spec '{}'".format(spec.name)
|
||||
assert not self.repository.is_virtual_safe(spec.name), msg
|
||||
|
||||
pkg_provided = spec.package_class.provided
|
||||
pkg_provided = self.repository.get_pkg_class(spec.name).provided
|
||||
for provided_spec, provider_specs in six.iteritems(pkg_provided):
|
||||
for provider_spec in provider_specs:
|
||||
# TODO: fix this comment.
|
||||
@@ -262,12 +262,12 @@ def remove_provider(self, pkg_name):
|
||||
|
||||
def copy(self):
|
||||
"""Return a deep copy of this index."""
|
||||
clone = ProviderIndex()
|
||||
clone = ProviderIndex(repository=self.repository)
|
||||
clone.providers = self._transform(lambda vpkg, pset: (vpkg, set((p.copy() for p in pset))))
|
||||
return clone
|
||||
|
||||
@staticmethod
|
||||
def from_json(stream):
|
||||
def from_json(stream, repository):
|
||||
"""Construct a provider index from its JSON representation.
|
||||
|
||||
Args:
|
||||
@@ -281,7 +281,7 @@ def from_json(stream):
|
||||
if "provider_index" not in data:
|
||||
raise ProviderIndexError("YAML ProviderIndex does not start with 'provider_index'")
|
||||
|
||||
index = ProviderIndex()
|
||||
index = ProviderIndex(repository=repository)
|
||||
providers = data["provider_index"]["providers"]
|
||||
index.providers = _transform(
|
||||
providers,
|
||||
|
||||
@@ -11,8 +11,10 @@
|
||||
import macholib.mach_o
|
||||
import macholib.MachO
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.lang
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.lang import memoized
|
||||
from llnl.util.symlink import symlink
|
||||
|
||||
import spack.bootstrap
|
||||
@@ -76,15 +78,14 @@ def __init__(self, old_path, new_path):
|
||||
super(BinaryTextReplaceError, self).__init__(msg, err_msg)
|
||||
|
||||
|
||||
@memoized
|
||||
def _patchelf():
|
||||
"""Return the full path to the patchelf binary, if available, else None."""
|
||||
if is_macos:
|
||||
return None
|
||||
|
||||
patchelf = executable.which("patchelf")
|
||||
if patchelf is None:
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
patchelf = spack.bootstrap.ensure_patchelf_in_path_or_raise()
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
patchelf = spack.bootstrap.ensure_patchelf_in_path_or_raise()
|
||||
|
||||
return patchelf.path
|
||||
|
||||
@@ -887,7 +888,7 @@ def file_is_relocatable(filename, paths_to_relocate=None):
|
||||
# Remove the RPATHS from the strings in the executable
|
||||
set_of_strings = set(strings(filename, output=str).split())
|
||||
|
||||
m_type, m_subtype = mime_type(filename)
|
||||
m_type, m_subtype = fs.mime_type(filename)
|
||||
if m_type == "application":
|
||||
tty.debug("{0},{1}".format(m_type, m_subtype), level=2)
|
||||
|
||||
@@ -923,7 +924,7 @@ def is_binary(filename):
|
||||
Returns:
|
||||
True or False
|
||||
"""
|
||||
m_type, _ = mime_type(filename)
|
||||
m_type, _ = fs.mime_type(filename)
|
||||
|
||||
msg = "[{0}] -> ".format(filename)
|
||||
if m_type == "application":
|
||||
@@ -934,30 +935,6 @@ def is_binary(filename):
|
||||
return False
|
||||
|
||||
|
||||
@llnl.util.lang.memoized
|
||||
def _get_mime_type():
|
||||
file_cmd = executable.which("file")
|
||||
for arg in ["-b", "-h", "--mime-type"]:
|
||||
file_cmd.add_default_arg(arg)
|
||||
return file_cmd
|
||||
|
||||
|
||||
@llnl.util.lang.memoized
|
||||
def mime_type(filename):
|
||||
"""Returns the mime type and subtype of a file.
|
||||
|
||||
Args:
|
||||
filename: file to be analyzed
|
||||
|
||||
Returns:
|
||||
Tuple containing the MIME type and subtype
|
||||
"""
|
||||
output = _get_mime_type()(filename, output=str, error=str).strip()
|
||||
tty.debug("==> " + output, level=2)
|
||||
type, _, subtype = output.partition("/")
|
||||
return type, subtype
|
||||
|
||||
|
||||
# Memoize this due to repeated calls to libraries in the same directory.
|
||||
@llnl.util.lang.memoized
|
||||
def _exists_dir(dirname):
|
||||
@@ -975,7 +952,7 @@ def fixup_macos_rpath(root, filename):
|
||||
True if fixups were applied, else False
|
||||
"""
|
||||
abspath = os.path.join(root, filename)
|
||||
if mime_type(abspath) != ("application", "x-mach-binary"):
|
||||
if fs.mime_type(abspath) != ("application", "x-mach-binary"):
|
||||
return False
|
||||
|
||||
# Get Mach-O header commands
|
||||
|
||||
@@ -12,13 +12,16 @@
|
||||
import itertools
|
||||
import os
|
||||
import os.path
|
||||
import random
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
import string
|
||||
import sys
|
||||
import tempfile
|
||||
import traceback
|
||||
import types
|
||||
import uuid
|
||||
from typing import Dict # novm
|
||||
|
||||
import ruamel.yaml as yaml
|
||||
@@ -37,6 +40,7 @@
|
||||
import spack.provider_index
|
||||
import spack.spec
|
||||
import spack.tag
|
||||
import spack.util.file_cache
|
||||
import spack.util.naming as nm
|
||||
import spack.util.path
|
||||
from spack.util.executable import which
|
||||
@@ -559,6 +563,9 @@ def _create_new_cache(self): # type: () -> Dict[str, os.stat_result]
|
||||
def last_mtime(self):
|
||||
return max(sinfo.st_mtime for sinfo in self._packages_to_stats.values())
|
||||
|
||||
def modified_since(self, since):
|
||||
return [name for name, sinfo in self._packages_to_stats.items() if sinfo.st_mtime > since]
|
||||
|
||||
def __getitem__(self, item):
|
||||
return self._packages_to_stats[item]
|
||||
|
||||
@@ -573,6 +580,10 @@ def __len__(self):
|
||||
class Indexer(object):
|
||||
"""Adaptor for indexes that need to be generated when repos are updated."""
|
||||
|
||||
def __init__(self, repository):
|
||||
self.repository = repository
|
||||
self.index = None
|
||||
|
||||
def create(self):
|
||||
self.index = self._create()
|
||||
|
||||
@@ -613,10 +624,10 @@ class TagIndexer(Indexer):
|
||||
"""Lifecycle methods for a TagIndex on a Repo."""
|
||||
|
||||
def _create(self):
|
||||
return spack.tag.TagIndex()
|
||||
return spack.tag.TagIndex(self.repository)
|
||||
|
||||
def read(self, stream):
|
||||
self.index = spack.tag.TagIndex.from_json(stream)
|
||||
self.index = spack.tag.TagIndex.from_json(stream, self.repository)
|
||||
|
||||
def update(self, pkg_fullname):
|
||||
self.index.update_package(pkg_fullname)
|
||||
@@ -629,14 +640,17 @@ class ProviderIndexer(Indexer):
|
||||
"""Lifecycle methods for virtual package providers."""
|
||||
|
||||
def _create(self):
|
||||
return spack.provider_index.ProviderIndex()
|
||||
return spack.provider_index.ProviderIndex(repository=self.repository)
|
||||
|
||||
def read(self, stream):
|
||||
self.index = spack.provider_index.ProviderIndex.from_json(stream)
|
||||
self.index = spack.provider_index.ProviderIndex.from_json(stream, self.repository)
|
||||
|
||||
def update(self, pkg_fullname):
|
||||
name = pkg_fullname.split(".")[-1]
|
||||
if spack.repo.path.is_virtual(name, use_index=False):
|
||||
is_virtual = (
|
||||
not self.repository.exists(name) or self.repository.get_pkg_class(name).virtual
|
||||
)
|
||||
if is_virtual:
|
||||
return
|
||||
self.index.remove_provider(pkg_fullname)
|
||||
self.index.update(pkg_fullname)
|
||||
@@ -649,7 +663,7 @@ class PatchIndexer(Indexer):
|
||||
"""Lifecycle methods for patch cache."""
|
||||
|
||||
def _create(self):
|
||||
return spack.patch.PatchCache()
|
||||
return spack.patch.PatchCache(repository=self.repository)
|
||||
|
||||
def needs_update(self):
|
||||
# TODO: patches can change under a package and we should handle
|
||||
@@ -659,7 +673,7 @@ def needs_update(self):
|
||||
return False
|
||||
|
||||
def read(self, stream):
|
||||
self.index = spack.patch.PatchCache.from_json(stream)
|
||||
self.index = spack.patch.PatchCache.from_json(stream, repository=self.repository)
|
||||
|
||||
def write(self, stream):
|
||||
self.index.to_json(stream)
|
||||
@@ -684,7 +698,7 @@ class RepoIndex(object):
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, package_checker, namespace):
|
||||
def __init__(self, package_checker, namespace, cache):
|
||||
self.checker = package_checker
|
||||
self.packages_path = self.checker.packages_path
|
||||
if sys.platform == "win32":
|
||||
@@ -693,6 +707,7 @@ def __init__(self, package_checker, namespace):
|
||||
|
||||
self.indexers = {}
|
||||
self.indexes = {}
|
||||
self.cache = cache
|
||||
|
||||
def add_indexer(self, name, indexer):
|
||||
"""Add an indexer to the repo index.
|
||||
@@ -737,22 +752,26 @@ def _build_index(self, name, indexer):
|
||||
cache_filename = "{0}/{1}-index.json".format(name, self.namespace)
|
||||
|
||||
# Compute which packages needs to be updated in the cache
|
||||
misc_cache = spack.caches.misc_cache
|
||||
index_mtime = misc_cache.mtime(cache_filename)
|
||||
index_mtime = self.cache.mtime(cache_filename)
|
||||
needs_update = self.checker.modified_since(index_mtime)
|
||||
|
||||
needs_update = [x for x, sinfo in self.checker.items() if sinfo.st_mtime > index_mtime]
|
||||
|
||||
index_existed = misc_cache.init_entry(cache_filename)
|
||||
index_existed = self.cache.init_entry(cache_filename)
|
||||
if index_existed and not needs_update:
|
||||
# If the index exists and doesn't need an update, read it
|
||||
with misc_cache.read_transaction(cache_filename) as f:
|
||||
with self.cache.read_transaction(cache_filename) as f:
|
||||
indexer.read(f)
|
||||
|
||||
else:
|
||||
# Otherwise update it and rewrite the cache file
|
||||
with misc_cache.write_transaction(cache_filename) as (old, new):
|
||||
with self.cache.write_transaction(cache_filename) as (old, new):
|
||||
indexer.read(old) if old else indexer.create()
|
||||
|
||||
# Compute which packages need to be updated **again** in case someone updated them
# while we waited for the lock
|
||||
new_index_mtime = self.cache.mtime(cache_filename)
|
||||
if new_index_mtime != index_mtime:
|
||||
needs_update = self.checker.modified_since(new_index_mtime)
|
||||
|
||||
for pkg_name in needs_update:
|
||||
namespaced_name = "%s.%s" % (self.namespace, pkg_name)
|
||||
indexer.update(namespaced_name)
|
||||
@@ -773,7 +792,8 @@ class RepoPath(object):
|
||||
repos (list): list Repo objects or paths to put in this RepoPath
|
||||
"""
|
||||
|
||||
def __init__(self, *repos):
|
||||
def __init__(self, *repos, **kwargs):
|
||||
cache = kwargs.get("cache", spack.caches.misc_cache)
|
||||
self.repos = []
|
||||
self.by_namespace = nm.NamespaceTrie()
|
||||
|
||||
@@ -785,7 +805,7 @@ def __init__(self, *repos):
|
||||
for repo in repos:
|
||||
try:
|
||||
if isinstance(repo, six.string_types):
|
||||
repo = Repo(repo)
|
||||
repo = Repo(repo, cache=cache)
|
||||
self.put_last(repo)
|
||||
except RepoError as e:
|
||||
tty.warn(
|
||||
@@ -876,7 +896,7 @@ def all_package_classes(self):
|
||||
def provider_index(self):
|
||||
"""Merged ProviderIndex from all Repos in the RepoPath."""
|
||||
if self._provider_index is None:
|
||||
self._provider_index = spack.provider_index.ProviderIndex()
|
||||
self._provider_index = spack.provider_index.ProviderIndex(repository=self)
|
||||
for repo in reversed(self.repos):
|
||||
self._provider_index.merge(repo.provider_index)
|
||||
|
||||
@@ -886,7 +906,7 @@ def provider_index(self):
|
||||
def tag_index(self):
|
||||
"""Merged TagIndex from all Repos in the RepoPath."""
|
||||
if self._tag_index is None:
|
||||
self._tag_index = spack.tag.TagIndex()
|
||||
self._tag_index = spack.tag.TagIndex(repository=self)
|
||||
for repo in reversed(self.repos):
|
||||
self._tag_index.merge(repo.tag_index)
|
||||
|
||||
@@ -896,7 +916,7 @@ def tag_index(self):
|
||||
def patch_index(self):
|
||||
"""Merged PatchIndex from all Repos in the RepoPath."""
|
||||
if self._patch_index is None:
|
||||
self._patch_index = spack.patch.PatchCache()
|
||||
self._patch_index = spack.patch.PatchCache(repository=self)
|
||||
for repo in reversed(self.repos):
|
||||
self._patch_index.update(repo.patch_index)
|
||||
|
||||
@@ -925,7 +945,6 @@ def repo_for_pkg(self, spec):
|
||||
"""Given a spec, get the repository for its package."""
|
||||
# We don't @_autospec this function b/c it's called very frequently
|
||||
# and we want to avoid parsing str's into Specs unnecessarily.
|
||||
namespace = None
|
||||
if isinstance(spec, spack.spec.Spec):
|
||||
namespace = spec.namespace
|
||||
name = spec.name
|
||||
@@ -938,7 +957,7 @@ def repo_for_pkg(self, spec):
|
||||
if namespace:
|
||||
fullspace = python_package_for_repo(namespace)
|
||||
if fullspace not in self.by_namespace:
|
||||
raise UnknownNamespaceError(namespace)
|
||||
raise UnknownNamespaceError(namespace, name=name)
|
||||
return self.by_namespace[fullspace]
|
||||
|
||||
# If there's no namespace, search in the RepoPath.
|
||||
@@ -983,20 +1002,34 @@ def exists(self, pkg_name):
|
||||
"""
|
||||
return any(repo.exists(pkg_name) for repo in self.repos)
|
||||
|
||||
def is_virtual(self, pkg_name, use_index=True):
|
||||
"""True if the package with this name is virtual, False otherwise.
|
||||
|
||||
Set `use_index` False when calling from a code block that could
|
||||
be run during the computation of the provider index."""
|
||||
def _have_name(self, pkg_name):
|
||||
have_name = pkg_name is not None
|
||||
if have_name and not isinstance(pkg_name, str):
|
||||
raise ValueError("is_virtual(): expected package name, got %s" % type(pkg_name))
|
||||
if use_index:
|
||||
return have_name and pkg_name in self.provider_index
|
||||
else:
|
||||
return have_name and (
|
||||
not self.exists(pkg_name) or self.get_pkg_class(pkg_name).virtual
|
||||
)
|
||||
return have_name
|
||||
|
||||
    def is_virtual(self, pkg_name):
        """Return True if the package with this name is virtual, False otherwise.

        This function uses the provider index. If calling from a code block that
        is used to construct the provider index, use the ``is_virtual_safe`` function.

        Args:
            pkg_name (str): name of the package we want to check
        """
        have_name = self._have_name(pkg_name)
        return have_name and pkg_name in self.provider_index

    def is_virtual_safe(self, pkg_name):
        """Return True if the package with this name is virtual, False otherwise.

        This function doesn't use the provider index.

        Args:
            pkg_name (str): name of the package we want to check
        """
        have_name = self._have_name(pkg_name)
        return have_name and (not self.exists(pkg_name) or self.get_pkg_class(pkg_name).virtual)
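A short illustrative sketch of when each variant applies; code that runs while the provider index is being built (as in provider_index.py above) must use the safe variant, since ``is_virtual`` consults that very index:

import spack.repo

# Normal query paths can rely on the (possibly cached) provider index:
if spack.repo.path.is_virtual("mpi"):
    print("mpi is provided by other packages, not installable directly")

# Code that participates in constructing the provider index must avoid it:
if spack.repo.path.is_virtual_safe("mpi"):
    print("mpi is virtual (answered from package classes, no index involved)")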
|
||||
|
||||
def __contains__(self, pkg_name):
|
||||
return self.exists(pkg_name)
|
||||
@@ -1015,7 +1048,7 @@ class Repo(object):
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, root):
|
||||
def __init__(self, root, cache=None):
|
||||
"""Instantiate a package repository from a filesystem path.
|
||||
|
||||
Args:
|
||||
@@ -1070,6 +1103,7 @@ def check(condition, msg):
|
||||
|
||||
# Indexes for this repository, computed lazily
|
||||
self._repo_index = None
|
||||
self._cache = cache or spack.caches.misc_cache
|
||||
|
||||
def real_name(self, import_name):
|
||||
"""Allow users to import Spack packages using Python identifiers.
|
||||
@@ -1181,10 +1215,10 @@ def purge(self):
|
||||
def index(self):
|
||||
"""Construct the index for this repo lazily."""
|
||||
if self._repo_index is None:
|
||||
self._repo_index = RepoIndex(self._pkg_checker, self.namespace)
|
||||
self._repo_index.add_indexer("providers", ProviderIndexer())
|
||||
self._repo_index.add_indexer("tags", TagIndexer())
|
||||
self._repo_index.add_indexer("patches", PatchIndexer())
|
||||
self._repo_index = RepoIndex(self._pkg_checker, self.namespace, cache=self._cache)
|
||||
self._repo_index.add_indexer("providers", ProviderIndexer(self))
|
||||
self._repo_index.add_indexer("tags", TagIndexer(self))
|
||||
self._repo_index.add_indexer("patches", PatchIndexer(self))
|
||||
return self._repo_index
|
||||
|
||||
@property
|
||||
@@ -1283,9 +1317,26 @@ def last_mtime(self):
|
||||
return self._pkg_checker.last_mtime()
|
||||
|
||||
def is_virtual(self, pkg_name):
|
||||
"""True if the package with this name is virtual, False otherwise."""
|
||||
"""Return True if the package with this name is virtual, False otherwise.
|
||||
|
||||
This function uses the provider index. If calling from a code block that
is used to construct the provider index, use the ``is_virtual_safe`` function.
|
||||
|
||||
Args:
|
||||
pkg_name (str): name of the package we want to check
|
||||
"""
|
||||
return pkg_name in self.provider_index
|
||||
|
||||
def is_virtual_safe(self, pkg_name):
|
||||
"""Return True if the package with this name is virtual, False otherwise.
|
||||
|
||||
This function doesn't use the provider index.
|
||||
|
||||
Args:
|
||||
pkg_name (str): name of the package we want to check
|
||||
"""
|
||||
return not self.exists(pkg_name) or self.get_pkg_class(pkg_name).virtual
|
||||
|
||||
def get_pkg_class(self, pkg_name):
|
||||
"""Get the class for the package out of its module.
|
||||
|
||||
@@ -1384,9 +1435,19 @@ def create_or_construct(path, namespace=None):
|
||||
return Repo(path)
|
||||
|
||||
|
||||
def _path(repo_dirs=None):
|
||||
def _path(configuration=None):
|
||||
"""Get the singleton RepoPath instance for Spack."""
|
||||
repo_dirs = repo_dirs or spack.config.get("repos")
|
||||
configuration = configuration or spack.config.config
|
||||
return create(configuration=configuration)
|
||||
|
||||
|
||||
def create(configuration):
|
||||
"""Create a RepoPath from a configuration object.
|
||||
|
||||
Args:
|
||||
configuration (spack.config.Configuration): configuration object
|
||||
"""
|
||||
repo_dirs = configuration.get("repos")
|
||||
if not repo_dirs:
|
||||
raise NoRepoConfiguredError("Spack configuration contains no package repositories.")
|
||||
return RepoPath(*repo_dirs)
|
||||
@@ -1396,7 +1457,8 @@ def _path(repo_dirs=None):
|
||||
path = llnl.util.lang.Singleton(_path)
|
||||
|
||||
# Add the finder to sys.meta_path
|
||||
sys.meta_path.append(ReposFinder())
|
||||
REPOS_FINDER = ReposFinder()
|
||||
sys.meta_path.append(REPOS_FINDER)
|
||||
|
||||
|
||||
def all_package_names(include_virtuals=False):
|
||||
@@ -1405,36 +1467,67 @@ def all_package_names(include_virtuals=False):
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def additional_repository(repository):
|
||||
"""Adds temporarily a repository to the default one.
|
||||
|
||||
Args:
|
||||
repository: repository to be added
|
||||
"""
|
||||
path.put_first(repository)
|
||||
yield
|
||||
path.remove(repository)
|
||||
|
||||
|
||||
@contextlib.contextmanager
def use_repositories(*paths_and_repos):
def use_repositories(*paths_and_repos, **kwargs):
    """Use the repositories passed as arguments within the context manager.

    Args:
        *paths_and_repos: paths to the repositories to be used, or
            already constructed Repo objects

        override (bool): if True use only the repositories passed as input,
            if False add them to the top of the list of current repositories.
    Returns:
        Corresponding RepoPath object
    """
    global path
    path, saved = RepoPath(*paths_and_repos), path
    # TODO (Python 2.7): remove this kwargs on deprecation of Python 2.7 support
    override = kwargs.get("override", True)
    paths = [getattr(x, "root", x) for x in paths_and_repos]
    scope_name = "use-repo-{}".format(uuid.uuid4())
    repos_key = "repos:" if override else "repos"
    spack.config.config.push_scope(
        spack.config.InternalConfigScope(name=scope_name, data={repos_key: paths})
    )
    path, saved = create(configuration=spack.config.config), path
    try:
        yield path
    finally:
        spack.config.config.remove_scope(scope_name=scope_name)
        path = saved
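A hedged usage sketch of the updated context manager; the repository path and package name below are placeholders:

import spack.repo

# Search an extra repository *in addition to* the configured ones:
with spack.repo.use_repositories("/path/to/extra/repo", override=False) as repo_path:
    pkg_cls = repo_path.get_pkg_class("zlib")

# Default (override=True): only the repositories passed in are visible:
with spack.repo.use_repositories("/path/to/extra/repo") as repo_path:
    print([r.root for r in repo_path.repos])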
|
||||
|
||||
|
||||
class MockRepositoryBuilder(object):
|
||||
"""Build a mock repository in a directory"""
|
||||
|
||||
def __init__(self, root_directory, namespace=None):
|
||||
namespace = namespace or "".join(random.choice(string.ascii_uppercase) for _ in range(10))
|
||||
self.root, self.namespace = create_repo(str(root_directory), namespace)
|
||||
|
||||
def add_package(self, name, dependencies=None):
|
||||
"""Create a mock package in the repository, using a Jinja2 template.
|
||||
|
||||
Args:
|
||||
name (str): name of the new package
|
||||
dependencies (list): list of ("dep_spec", "dep_type", "condition") tuples.
|
||||
Both "dep_type" and "condition" can default to ``None`` in which case
|
||||
``spack.dependency.default_deptype`` and ``spack.spec.Spec()`` are used.
|
||||
"""
|
||||
dependencies = dependencies or []
|
||||
context = {"cls_name": spack.util.naming.mod_to_class(name), "dependencies": dependencies}
|
||||
template = spack.tengine.make_environment().get_template("mock-repository/package.pyt")
|
||||
text = template.render(context)
|
||||
package_py = self.recipe_filename(name)
|
||||
fs.mkdirp(os.path.dirname(package_py))
|
||||
with open(package_py, "w") as f:
|
||||
f.write(text)
|
||||
|
||||
def remove(self, name):
|
||||
package_py = self.recipe_filename(name)
|
||||
shutil.rmtree(os.path.dirname(package_py))
|
||||
|
||||
def recipe_filename(self, name):
|
||||
return os.path.join(self.root, "packages", name, "package.py")
|
||||
|
||||
|
||||
class RepoError(spack.error.SpackError):
|
||||
"""Superclass for repository-related errors."""
|
||||
|
||||
@@ -1463,7 +1556,7 @@ class UnknownPackageError(UnknownEntityError):
|
||||
"""Raised when we encounter a package spack doesn't have."""
|
||||
|
||||
def __init__(self, name, repo=None):
|
||||
msg = None
|
||||
msg = "Attempting to retrieve anonymous package."
|
||||
long_msg = None
|
||||
if name:
|
||||
if repo:
|
||||
@@ -1480,8 +1573,6 @@ def __init__(self, name, repo=None):
|
||||
long_msg = long_msg.format(name)
|
||||
else:
|
||||
long_msg = "You may need to run 'spack clean -m'."
|
||||
else:
|
||||
msg = "Attempting to retrieve anonymous package."
|
||||
|
||||
super(UnknownPackageError, self).__init__(msg, long_msg)
|
||||
self.name = name
|
||||
@@ -1490,8 +1581,12 @@ def __init__(self, name, repo=None):
|
||||
class UnknownNamespaceError(UnknownEntityError):
|
||||
"""Raised when we encounter an unknown namespace"""
|
||||
|
||||
    def __init__(self, namespace):
        super(UnknownNamespaceError, self).__init__("Unknown namespace: %s" % namespace)
    def __init__(self, namespace, name=None):
        msg, long_msg = "Unknown namespace: {}".format(namespace), None
        if name == "yaml":
            long_msg = "Did you mean to specify a filename with './{}.{}'?"
            long_msg = long_msg.format(namespace, name)
        super(UnknownNamespaceError, self).__init__(msg, long_msg)
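Illustrative sketch of the hint this enables: an argument like "mypackage.yaml" can be parsed as namespace "mypackage" with package name "yaml", so the repository lookup fails and the new message nudges the user toward a filename (assumed scenario, shown here only to motivate the special case):

raise UnknownNamespaceError("mypackage", name="yaml")
# => Unknown namespace: mypackage
#    Did you mean to specify a filename with './mypackage.yaml'?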
|
||||
|
||||
|
||||
class FailedConstructorError(RepoError):
|
||||
|
||||
@@ -245,6 +245,7 @@ def __init__(self, cls, function, format_name, args):
|
||||
self.cls = cls
|
||||
self.function = function
|
||||
self.filename = None
|
||||
self.ctest_parsing = getattr(args, "ctest_parsing", False)
|
||||
if args.cdash_upload_url:
|
||||
self.format_name = "cdash"
|
||||
self.filename = "cdash_report"
|
||||
@@ -271,10 +272,10 @@ def __enter__(self):
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
if self.format_name:
|
||||
# Close the collector and restore the
|
||||
# original PackageInstaller._install_task
|
||||
# Close the collector and restore the original function
|
||||
self.collector.__exit__(exc_type, exc_val, exc_tb)
|
||||
|
||||
report_data = {"specs": self.collector.specs}
|
||||
report_data["ctest-parsing"] = self.ctest_parsing
|
||||
report_fn = getattr(self.report_writer, "%s_report" % self.type)
|
||||
report_fn(self.filename, report_data)
|
||||
|
||||
@@ -23,8 +23,10 @@
|
||||
import spack.build_environment
|
||||
import spack.fetch_strategy
|
||||
import spack.package_base
|
||||
import spack.platforms
|
||||
from spack.error import SpackError
|
||||
from spack.reporter import Reporter
|
||||
from spack.reporters.extract import extract_test_parts
|
||||
from spack.util.crypto import checksum
|
||||
from spack.util.executable import which
|
||||
from spack.util.log_parse import parse_log_events
|
||||
@@ -46,6 +48,11 @@
|
||||
cdash_phases.add("update")
|
||||
|
||||
|
||||
def build_stamp(track, timestamp):
    buildstamp_format = "%Y%m%d-%H%M-{0}".format(track)
    return time.strftime(buildstamp_format, time.localtime(timestamp))
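A small usage sketch; the exact digits depend on the local timezone of the machine generating the report:

import time

stamp = build_stamp("Experimental", time.time())
# e.g. '20220930-1412-Experimental'  (YYYYMMDD-HHMM-<track>)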
|
||||
|
||||
|
||||
class CDash(Reporter):
|
||||
"""Generate reports of spec installations for CDash.
|
||||
|
||||
@@ -80,6 +87,9 @@ def __init__(self, args):
|
||||
packages = args.spec
|
||||
elif getattr(args, "specs", ""):
|
||||
packages = args.specs
|
||||
elif getattr(args, "package", ""):
|
||||
# Ensure CI 'spack test run' can output CDash results
|
||||
packages = args.package
|
||||
else:
|
||||
packages = []
|
||||
for file in args.specfiles:
|
||||
@@ -90,29 +100,36 @@ def __init__(self, args):
|
||||
self.base_buildname = args.cdash_build or self.install_command
|
||||
self.site = args.cdash_site or socket.gethostname()
|
||||
self.osname = platform.system()
|
||||
self.osrelease = platform.release()
|
||||
self.target = spack.platforms.host().target("default_target")
|
||||
self.endtime = int(time.time())
|
||||
if args.cdash_buildstamp:
|
||||
self.buildstamp = args.cdash_buildstamp
|
||||
else:
|
||||
buildstamp_format = "%Y%m%d-%H%M-{0}".format(args.cdash_track)
|
||||
self.buildstamp = time.strftime(buildstamp_format, time.localtime(self.endtime))
|
||||
self.buildstamp = (
|
||||
args.cdash_buildstamp
|
||||
if args.cdash_buildstamp
|
||||
else build_stamp(args.cdash_track, self.endtime)
|
||||
)
|
||||
self.buildIds = collections.OrderedDict()
|
||||
self.revision = ""
|
||||
git = which("git")
|
||||
with working_dir(spack.paths.spack_root):
|
||||
self.revision = git("rev-parse", "HEAD", output=str).strip()
|
||||
self.generator = "spack-{0}".format(spack.main.get_version())
|
||||
self.multiple_packages = False
|
||||
|
||||
def report_build_name(self, pkg_name):
|
||||
return (
|
||||
"{0} - {1}".format(self.base_buildname, pkg_name)
|
||||
if self.multiple_packages
|
||||
else self.base_buildname
|
||||
)
|
||||
|
||||
def build_report_for_package(self, directory_name, package, duration):
|
||||
if "stdout" not in package:
|
||||
# Skip reporting on packages that did not generate any output.
|
||||
return
|
||||
|
||||
self.current_package_name = package["name"]
|
||||
if self.multiple_packages:
|
||||
self.buildname = "{0} - {1}".format(self.base_buildname, package["name"])
|
||||
else:
|
||||
self.buildname = self.base_buildname
|
||||
self.buildname = self.report_build_name(self.current_package_name)
|
||||
report_data = self.initialize_report(directory_name)
|
||||
for phase in cdash_phases:
|
||||
report_data[phase] = {}
|
||||
@@ -228,6 +245,7 @@ def build_report(self, directory_name, input_data):
|
||||
# Do an initial scan to determine if we are generating reports for more
|
||||
# than one package. When we're only reporting on a single package we
|
||||
# do not explicitly include the package's name in the CDash build name.
|
||||
self.multiple_packages = False
|
||||
num_packages = 0
|
||||
for spec in input_data["specs"]:
|
||||
# Do not generate reports for packages that were installed
|
||||
@@ -255,27 +273,19 @@ def build_report(self, directory_name, input_data):
|
||||
self.build_report_for_package(directory_name, package, duration)
|
||||
self.finalize_report()
|
||||
|
||||
def test_report_for_package(self, directory_name, package, duration):
|
||||
if "stdout" not in package:
|
||||
# Skip reporting on packages that did not generate any output.
|
||||
return
|
||||
def extract_ctest_test_data(self, package, phases, report_data):
|
||||
"""Extract ctest test data for the package."""
|
||||
# Track the phases we perform so we know what reports to create.
|
||||
# We always report the update step because this is how we tell CDash
|
||||
# what revision of Spack we are using.
|
||||
assert "update" in phases
|
||||
|
||||
self.current_package_name = package["name"]
|
||||
self.buildname = "{0} - {1}".format(self.base_buildname, package["name"])
|
||||
|
||||
report_data = self.initialize_report(directory_name)
|
||||
|
||||
for phase in ("test", "update"):
|
||||
for phase in phases:
|
||||
report_data[phase] = {}
|
||||
report_data[phase]["loglines"] = []
|
||||
report_data[phase]["status"] = 0
|
||||
report_data[phase]["endtime"] = self.endtime
|
||||
|
||||
# Track the phases we perform so we know what reports to create.
|
||||
# We always report the update step because this is how we tell CDash
|
||||
# what revision of Spack we are using.
|
||||
phases_encountered = ["test", "update"]
|
||||
|
||||
# Generate a report for this package.
|
||||
# The first line just says "Testing package name-hash"
|
||||
report_data["test"]["loglines"].append(
|
||||
@@ -284,8 +294,7 @@ def test_report_for_package(self, directory_name, package, duration):
|
||||
for line in package["stdout"].splitlines()[1:]:
|
||||
report_data["test"]["loglines"].append(xml.sax.saxutils.escape(line))
|
||||
|
||||
self.starttime = self.endtime - duration
|
||||
for phase in phases_encountered:
|
||||
for phase in phases:
|
||||
report_data[phase]["starttime"] = self.starttime
|
||||
report_data[phase]["log"] = "\n".join(report_data[phase]["loglines"])
|
||||
errors, warnings = parse_log_events(report_data[phase]["loglines"])
|
||||
@@ -326,6 +335,19 @@ def clean_log_event(event):
|
||||
if phase == "update":
|
||||
report_data[phase]["revision"] = self.revision
|
||||
|
||||
def extract_standalone_test_data(self, package, phases, report_data):
|
||||
"""Extract stand-alone test outputs for the package."""
|
||||
|
||||
testing = {}
|
||||
report_data["testing"] = testing
|
||||
testing["starttime"] = self.starttime
|
||||
testing["endtime"] = self.starttime
|
||||
testing["generator"] = self.generator
|
||||
testing["parts"] = extract_test_parts(package["name"], package["stdout"].splitlines())
|
||||
|
||||
def report_test_data(self, directory_name, package, phases, report_data):
|
||||
"""Generate and upload the test report(s) for the package."""
|
||||
for phase in phases:
|
||||
# Write the report.
|
||||
report_name = phase.capitalize() + ".xml"
|
||||
report_file_name = package["name"] + "_" + report_name
|
||||
@@ -333,7 +355,7 @@ def clean_log_event(event):
|
||||
|
||||
with codecs.open(phase_report, "w", "utf-8") as f:
|
||||
env = spack.tengine.make_environment()
|
||||
if phase != "update":
|
||||
if phase not in ["update", "testing"]:
|
||||
# Update.xml stores site information differently
|
||||
# than the rest of the CTest XML files.
|
||||
site_template = posixpath.join(self.template_dir, "Site.xml")
|
||||
@@ -343,18 +365,65 @@ def clean_log_event(event):
|
||||
phase_template = posixpath.join(self.template_dir, report_name)
|
||||
t = env.get_template(phase_template)
|
||||
f.write(t.render(report_data))
|
||||
|
||||
tty.debug("Preparing to upload {0}".format(phase_report))
|
||||
self.upload(phase_report)
|
||||
|
||||
def test_report_for_package(self, directory_name, package, duration, ctest_parsing=False):
|
||||
if "stdout" not in package:
|
||||
# Skip reporting on packages that did not generate any output.
|
||||
tty.debug("Skipping report for {0}: No generated output".format(package["name"]))
|
||||
return
|
||||
|
||||
self.current_package_name = package["name"]
|
||||
if self.base_buildname == self.install_command:
|
||||
# The package list is NOT all that helpful in this case
|
||||
self.buildname = "{0}-{1}".format(self.current_package_name, package["id"])
|
||||
else:
|
||||
self.buildname = self.report_build_name(self.current_package_name)
|
||||
self.starttime = self.endtime - duration
|
||||
|
||||
report_data = self.initialize_report(directory_name)
|
||||
report_data["hostname"] = socket.gethostname()
|
||||
if ctest_parsing:
|
||||
phases = ["test", "update"]
|
||||
self.extract_ctest_test_data(package, phases, report_data)
|
||||
else:
|
||||
phases = ["testing"]
|
||||
self.extract_standalone_test_data(package, phases, report_data)
|
||||
|
||||
self.report_test_data(directory_name, package, phases, report_data)
|
||||
|
||||
def test_report(self, directory_name, input_data):
|
||||
# Generate reports for each package in each spec.
|
||||
"""Generate reports for each package in each spec."""
|
||||
tty.debug("Processing test report")
|
||||
for spec in input_data["specs"]:
|
||||
duration = 0
|
||||
if "time" in spec:
|
||||
duration = int(spec["time"])
|
||||
for package in spec["packages"]:
|
||||
self.test_report_for_package(directory_name, package, duration)
|
||||
self.test_report_for_package(
|
||||
directory_name,
|
||||
package,
|
||||
duration,
|
||||
input_data["ctest-parsing"],
|
||||
)
|
||||
|
||||
self.finalize_report()
|
||||
|
||||
def test_skipped_report(self, directory_name, spec, reason=None):
|
||||
output = "Skipped {0} package".format(spec.name)
|
||||
if reason:
|
||||
output += "\n{0}".format(reason)
|
||||
|
||||
package = {
|
||||
"name": spec.name,
|
||||
"id": spec.dag_hash(),
|
||||
"result": "skipped",
|
||||
"stdout": output,
|
||||
}
|
||||
self.test_report_for_package(directory_name, package, duration=0.0, ctest_parsing=False)
|
||||
|
||||
def concretization_report(self, directory_name, msg):
|
||||
self.buildname = self.base_buildname
|
||||
report_data = self.initialize_report(directory_name)
|
||||
@@ -384,12 +453,16 @@ def initialize_report(self, directory_name):
|
||||
report_data["buildname"] = self.buildname
|
||||
report_data["buildstamp"] = self.buildstamp
|
||||
report_data["install_command"] = self.install_command
|
||||
report_data["generator"] = self.generator
|
||||
report_data["osname"] = self.osname
|
||||
report_data["osrelease"] = self.osrelease
|
||||
report_data["site"] = self.site
|
||||
report_data["target"] = self.target
|
||||
return report_data
|
||||
|
||||
def upload(self, filename):
|
||||
if not self.cdash_upload_url:
|
||||
print("Cannot upload {0} due to missing upload url".format(filename))
|
||||
return
|
||||
|
||||
# Compute md5 checksum for the contents of this file.
|
||||
@@ -412,7 +485,7 @@ def upload(self, filename):
|
||||
request.add_header("Authorization", "Bearer {0}".format(self.authtoken))
|
||||
try:
|
||||
# By default, urllib2 only supports GET and POST.
|
||||
# CDash needs expects this file to be uploaded via PUT.
|
||||
# CDash expects this file to be uploaded via PUT.
|
||||
request.get_method = lambda: "PUT"
|
||||
response = opener.open(request)
|
||||
if self.current_package_name not in self.buildIds:
|
||||
@@ -428,13 +501,13 @@ def upload(self, filename):
|
||||
|
||||
def finalize_report(self):
|
||||
if self.buildIds:
|
||||
print("View your build results here:")
|
||||
tty.msg("View your build results here:")
|
||||
for package_name, buildid in iteritems(self.buildIds):
|
||||
# Construct and display a helpful link if CDash responded with
|
||||
# a buildId.
|
||||
build_url = self.cdash_upload_url
|
||||
build_url = build_url[0 : build_url.find("submit.php")]
|
||||
build_url += "buildSummary.php?buildid={0}".format(buildid)
|
||||
print("{0}: {1}".format(package_name, build_url))
|
||||
tty.msg("{0}: {1}".format(package_name, build_url))
|
||||
if not self.success:
|
||||
raise SpackError("Errors encountered, see above for more details")
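For context, a minimal self-contained sketch of the PUT-upload pattern that upload() relies on above; the URL, token handling, and file name are placeholders, not values from this PR:

import urllib.request

def put_file(upload_url, filename):
    with open(filename, "rb") as f:
        data = f.read()
    request = urllib.request.Request(upload_url, data=data)
    # urllib only issues GET/POST by default; CDash expects the file via PUT.
    request.get_method = lambda: "PUT"
    with urllib.request.urlopen(request) as response:
        return response.getcode()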
|
||||
|
||||
lib/spack/spack/reporters/extract.py (new file, 212 lines)
@@ -0,0 +1,212 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import os
|
||||
import re
|
||||
import xml.sax.saxutils
|
||||
from datetime import datetime
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
# The keys here represent the only recognized (ctest/cdash) status values
|
||||
completed = {
|
||||
"failed": "Completed",
|
||||
"passed": "Completed",
|
||||
"notrun": "No tests to run",
|
||||
}
|
||||
|
||||
log_regexp = re.compile(r"^==> \[([0-9:.\-]*)(?:, [0-9]*)?\] (.*)")
|
||||
returns_regexp = re.compile(r"\[([0-9 ,]*)\]")
|
||||
|
||||
skip_msgs = ["Testing package", "Results for", "Detected the following"]
|
||||
skip_regexps = [re.compile(r"{0}".format(msg)) for msg in skip_msgs]
|
||||
|
||||
status_values = ["FAILED", "PASSED", "NO-TESTS"]
|
||||
status_regexps = [re.compile(r"^({0})".format(stat)) for stat in status_values]
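A quick illustration of what these patterns are intended to match; the sample lines below are invented, not taken from real test output:

import re

log_regexp = re.compile(r"^==> \[([0-9:.\-]*)(?:, [0-9]*)?\] (.*)")
m = log_regexp.search("==> [2022-08-15-14:30:05.123456] './configure --prefix=/tmp'")
assert m and m.group(2).startswith("'./configure")

status_regexps = [re.compile(r"^({0})".format(s)) for s in ("FAILED", "PASSED", "NO-TESTS")]
assert any(r.search("PASSED: check output") for r in status_regexps)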
|
||||
|
||||
|
||||
def add_part_output(part, line):
|
||||
if part:
|
||||
part["loglines"].append(xml.sax.saxutils.escape(line))
|
||||
|
||||
|
||||
def elapsed(current, previous):
|
||||
if not (current and previous):
|
||||
return 0
|
||||
|
||||
diff = current - previous
|
||||
tty.debug("elapsed = %s - %s = %s" % (current, previous, diff))
|
||||
return diff.total_seconds()
|
||||
|
||||
|
||||
def expected_failure(line):
|
||||
if not line:
|
||||
return False
|
||||
|
||||
match = returns_regexp.search(line)
|
||||
xfail = "0" not in match.group(0) if match else False
|
||||
return xfail
|
||||
|
||||
|
||||
def new_part():
|
||||
return {
|
||||
"command": None,
|
||||
"completed": "Unknown",
|
||||
"desc": None,
|
||||
"elapsed": None,
|
||||
"name": None,
|
||||
"loglines": [],
|
||||
"output": None,
|
||||
"status": "passed",
|
||||
}
|
||||
|
||||
|
||||
def part_name(source):
|
||||
# TODO: Should be passed the package prefix and only remove it
|
||||
elements = []
|
||||
for e in source.replace("'", "").split(" "):
|
||||
elements.append(os.path.basename(e) if os.sep in e else e)
|
||||
return "_".join(elements)
|
||||
|
||||
|
||||
def process_part_end(part, curr_time, last_time):
|
||||
if part:
|
||||
if not part["elapsed"]:
|
||||
part["elapsed"] = elapsed(curr_time, last_time)
|
||||
|
||||
stat = part["status"]
|
||||
if stat in completed:
|
||||
if stat == "passed" and expected_failure(part["desc"]):
|
||||
part["completed"] = "Expected to fail"
|
||||
elif part["completed"] == "Unknown":
|
||||
part["completed"] = completed[stat]
|
||||
part["output"] = "\n".join(part["loglines"])
|
||||
|
||||
|
||||
def timestamp(time_string):
|
||||
return datetime.strptime(time_string, "%Y-%m-%d-%H:%M:%S.%f")
|
||||
|
||||
|
||||
def skip(line):
|
||||
for regex in skip_regexps:
|
||||
match = regex.search(line)
|
||||
if match:
|
||||
return match
|
||||
|
||||
|
||||
def status(line):
|
||||
for regex in status_regexps:
|
||||
match = regex.search(line)
|
||||
if match:
|
||||
stat = match.group(0)
|
||||
stat = "notrun" if stat == "NO-TESTS" else stat
|
||||
return stat.lower()
|
||||
|
||||
|
||||
def extract_test_parts(default_name, outputs):
|
||||
parts = []
|
||||
part = {}
|
||||
testdesc = ""
|
||||
last_time = None
|
||||
curr_time = None
|
||||
for line in outputs:
|
||||
line = line.strip()
|
||||
if not line:
|
||||
add_part_output(part, line)
|
||||
continue
|
||||
|
||||
if skip(line):
|
||||
continue
|
||||
|
||||
# Skipped tests start with "Skipped" and end with "package"
|
||||
if line.startswith("Skipped") and line.endswith("package"):
|
||||
part = new_part()
|
||||
part["command"] = "Not Applicable"
|
||||
part["completed"] = line
|
||||
part["elapsed"] = 0.0
|
||||
part["name"] = default_name
|
||||
part["status"] = "notrun"
|
||||
parts.append(part)
|
||||
continue
|
||||
|
||||
# Process Spack log messages
|
||||
if line.find("==>") != -1:
|
||||
match = log_regexp.search(line)
|
||||
if match:
|
||||
curr_time = timestamp(match.group(1))
|
||||
msg = match.group(2)
|
||||
|
||||
# Skip logged message for caching build-time data
|
||||
if msg.startswith("Installing"):
|
||||
continue
|
||||
|
||||
# New command means the start of a new test part
|
||||
if msg.startswith("'") and msg.endswith("'"):
|
||||
# Update the last part processed
|
||||
process_part_end(part, curr_time, last_time)
|
||||
|
||||
part = new_part()
|
||||
part["command"] = msg
|
||||
part["name"] = part_name(msg)
|
||||
parts.append(part)
|
||||
|
||||
# Save off the optional test description if it was
|
||||
# tty.debug'ed *prior to* the command and reset
|
||||
if testdesc:
|
||||
part["desc"] = testdesc
|
||||
testdesc = ""
|
||||
|
||||
else:
|
||||
# Update the last part processed since a new log message
|
||||
# means a non-test action
|
||||
process_part_end(part, curr_time, last_time)
|
||||
|
||||
if testdesc:
|
||||
# We had a test description but no command so treat
|
||||
# as a new part (e.g., some import tests)
|
||||
part = new_part()
|
||||
part["name"] = "_".join(testdesc.split())
|
||||
part["command"] = "unknown"
|
||||
part["desc"] = testdesc
|
||||
parts.append(part)
|
||||
process_part_end(part, curr_time, curr_time)
|
||||
|
||||
# Assuming this is a description for the next test part
|
||||
testdesc = msg
|
||||
|
||||
else:
|
||||
tty.debug("Did not recognize test output '{0}'".format(line))
|
||||
|
||||
# Each log message potentially represents a new test part so
|
||||
# save off the last timestamp
|
||||
last_time = curr_time
|
||||
continue
|
||||
|
||||
# Check for status values
|
||||
stat = status(line)
|
||||
if stat:
|
||||
if part:
|
||||
part["status"] = stat
|
||||
add_part_output(part, line)
|
||||
else:
|
||||
tty.warn("No part to add status from '{0}'".format(line))
|
||||
continue
|
||||
|
||||
add_part_output(part, line)
|
||||
|
||||
# Process the last lingering part IF it didn't generate status
|
||||
process_part_end(part, curr_time, last_time)
|
||||
|
||||
# If no parts, create a skeleton to flag that the tests are not run
|
||||
if not parts:
|
||||
part = new_part()
|
||||
stat = "notrun"
|
||||
part["command"] = "Not Applicable"
|
||||
part["completed"] = completed[stat]
|
||||
part["elapsed"] = 0.0
|
||||
part["name"] = default_name
|
||||
part["status"] = stat
|
||||
parts.append(part)
|
||||
|
||||
return parts
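A hypothetical usage sketch of extract_test_parts() as defined above; the sample lines are invented to resemble Spack stand-alone test output and are not taken from this PR:

from spack.reporters.extract import extract_test_parts

sample = [
    "==> [2022-08-15-14:30:05.000000] test: checking installed binary",
    "==> [2022-08-15-14:30:05.100000] '/tmp/bin/example --version'",
    "PASSED",
]
parts = extract_test_parts("example-pkg", sample)
for part in parts:
    # Expected result: a single part such as ("example_--version", "passed", "Completed").
    print(part["name"], part["status"], part["completed"])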
|
||||
@@ -3,11 +3,10 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os.path
|
||||
import posixpath
|
||||
|
||||
import spack.build_environment
|
||||
import spack.fetch_strategy
|
||||
import spack.package_base
|
||||
import spack.tengine
|
||||
from spack.reporter import Reporter
|
||||
|
||||
__all__ = ["JUnit"]
|
||||
@@ -23,6 +22,11 @@ def __init__(self, args):
|
||||
self.template_file = posixpath.join("reports", "junit.xml")
|
||||
|
||||
def build_report(self, filename, report_data):
|
||||
if not (os.path.splitext(filename))[1]:
|
||||
# Ensure the report name will end with the proper extension;
|
||||
# otherwise, it currently defaults to the "directory" name.
|
||||
filename = filename + ".xml"
|
||||
|
||||
# Write the report
|
||||
with open(filename, "w") as f:
|
||||
env = spack.tengine.make_environment()
|
||||
|
||||
@@ -52,6 +52,15 @@
|
||||
"properties": runner_attributes_schema_items,
|
||||
}
|
||||
|
||||
remove_attributes_schema = {
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"required": ["tags"],
|
||||
"properties": {
|
||||
"tags": {"type": "array", "items": {"type": "string"}},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
core_shared_properties = union_dicts(
|
||||
runner_attributes_schema_items,
|
||||
@@ -80,6 +89,7 @@
|
||||
],
|
||||
},
|
||||
},
|
||||
"match_behavior": {"type": "string", "enum": ["first", "merge"], "default": "first"},
|
||||
"mappings": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
@@ -93,6 +103,7 @@
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
"remove-attributes": remove_attributes_schema,
|
||||
"runner-attributes": runner_selector_schema,
|
||||
},
|
||||
},
|
||||
@@ -101,6 +112,12 @@
|
||||
"signing-job-attributes": runner_selector_schema,
|
||||
"rebuild-index": {"type": "boolean"},
|
||||
"broken-specs-url": {"type": "string"},
|
||||
"broken-tests-packages": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@@ -47,6 +47,7 @@
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.store
|
||||
import spack.util.path
|
||||
import spack.util.timer
|
||||
import spack.variant
|
||||
import spack.version
|
||||
@@ -307,7 +308,10 @@ def check_same_flags(flag_dict_1, flag_dict_2):
|
||||
for t in types:
|
||||
values1 = set(flag_dict_1.get(t, []))
|
||||
values2 = set(flag_dict_2.get(t, []))
|
||||
assert values1 == values2
|
||||
error_msg = "Internal Error: A mismatch in flags has occurred:"
|
||||
error_msg += "\n\tvalues1: {v1}\n\tvalues2: {v2}".format(v1=values1, v2=values2)
|
||||
error_msg += "\n Please report this as an issue to the spack maintainers"
|
||||
assert values1 == values2, error_msg
|
||||
|
||||
|
||||
def check_packages_exist(specs):
|
||||
@@ -363,7 +367,11 @@ def format_core(self, core):
|
||||
|
||||
Modeled after traceback.format_stack.
|
||||
"""
|
||||
assert self.control
|
||||
error_msg = (
|
||||
"Internal Error: ASP Result.control not populated. Please report to the spack"
|
||||
" maintainers"
|
||||
)
|
||||
assert self.control, error_msg
|
||||
|
||||
symbols = dict((a.literal, a.symbol) for a in self.control.symbolic_atoms)
|
||||
|
||||
@@ -382,7 +390,11 @@ def minimize_core(self, core):
|
||||
ensure unsatisfiability. This algorithm reduces the core to only those
|
||||
essential facts.
|
||||
"""
|
||||
assert self.control
|
||||
error_msg = (
|
||||
"Internal Error: ASP Result.control not populated. Please report to the spack"
|
||||
" maintainers"
|
||||
)
|
||||
assert self.control, error_msg
|
||||
|
||||
min_core = core[:]
|
||||
for fact in core:
|
||||
@@ -821,7 +833,8 @@ def key_fn(version):
|
||||
def spec_versions(self, spec):
|
||||
"""Return list of clauses expressing spec's version constraints."""
|
||||
spec = specify(spec)
|
||||
assert spec.name
|
||||
msg = "Internal Error: spec with no name occured. Please report to the spack maintainers."
|
||||
assert spec.name, msg
|
||||
|
||||
if spec.concrete:
|
||||
return [fn.version(spec.name, spec.version)]
|
||||
@@ -930,7 +943,16 @@ def package_compiler_defaults(self, pkg):
|
||||
def package_requirement_rules(self, pkg):
|
||||
pkg_name = pkg.name
|
||||
config = spack.config.get("packages")
|
||||
requirements = config.get(pkg_name, {}).get("require", [])
|
||||
requirements = config.get(pkg_name, {}).get("require", []) or config.get("all", {}).get(
|
||||
"require", []
|
||||
)
|
||||
rules = self._rules_from_requirements(pkg_name, requirements)
|
||||
self.emit_facts_from_requirement_rules(rules, virtual=False)
|
||||
|
||||
def _rules_from_requirements(self, pkg_name, requirements):
|
||||
"""Manipulate requirements from packages.yaml, and return a list of tuples
|
||||
with a uniform structure (name, policy, requirements).
|
||||
"""
|
||||
if isinstance(requirements, string_types):
|
||||
rules = [(pkg_name, "one_of", [requirements])]
|
||||
else:
|
||||
@@ -939,17 +961,7 @@ def package_requirement_rules(self, pkg):
|
||||
for policy in ("one_of", "any_of"):
|
||||
if policy in requirement:
|
||||
rules.append((pkg_name, policy, requirement[policy]))
|
||||
|
||||
for requirement_grp_id, (pkg_name, policy, requirement_grp) in enumerate(rules):
|
||||
self.gen.fact(fn.requirement_group(pkg_name, requirement_grp_id))
|
||||
self.gen.fact(fn.requirement_policy(pkg_name, requirement_grp_id, policy))
|
||||
for requirement_weight, spec_str in enumerate(requirement_grp):
|
||||
spec = spack.spec.Spec(spec_str)
|
||||
if not spec.name:
|
||||
spec.name = pkg_name
|
||||
member_id = self.condition(spec, imposed_spec=spec, name=pkg_name)
|
||||
self.gen.fact(fn.requirement_group_member(member_id, pkg_name, requirement_grp_id))
|
||||
self.gen.fact(fn.requirement_has_weight(member_id, requirement_weight))
|
||||
return rules
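Illustrative only: the shape of the rules returned by _rules_from_requirements() for two hypothetical packages.yaml entries (the package name and specs are made up):

# require: "%gcc"  (a bare string)
expected_from_string = [("openmpi", "one_of", ["%gcc"])]

# require:
# - one_of: ["@4.1.1", "@3.1.6"]
# - any_of: ["+cuda", "+rocm"]
expected_from_list = [
    ("openmpi", "one_of", ["@4.1.1", "@3.1.6"]),
    ("openmpi", "any_of", ["+cuda", "+rocm"]),
]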
|
||||
|
||||
def pkg_rules(self, pkg, tests):
|
||||
pkg = packagize(pkg)
|
||||
@@ -1043,7 +1055,7 @@ def pkg_rules(self, pkg, tests):
|
||||
|
||||
self.package_requirement_rules(pkg)
|
||||
|
||||
def condition(self, required_spec, imposed_spec=None, name=None, msg=None):
|
||||
def condition(self, required_spec, imposed_spec=None, name=None, msg=None, node=False):
|
||||
"""Generate facts for a dependency or virtual provider condition.
|
||||
|
||||
Arguments:
|
||||
@@ -1053,6 +1065,8 @@ def condition(self, required_spec, imposed_spec=None, name=None, msg=None):
|
||||
name (str or None): name for `required_spec` (required if
|
||||
required_spec is anonymous, ignored if not)
|
||||
msg (str or None): description of the condition
|
||||
node (bool): if False does not emit "node" or "virtual_node" requirements
|
||||
from the imposed spec
|
||||
Returns:
|
||||
int: id of the condition created by this function
|
||||
"""
|
||||
@@ -1069,7 +1083,7 @@ def condition(self, required_spec, imposed_spec=None, name=None, msg=None):
|
||||
self.gen.fact(fn.condition_requirement(condition_id, pred.name, *pred.args))
|
||||
|
||||
if imposed_spec:
|
||||
self.impose(condition_id, imposed_spec, node=False, name=name)
|
||||
self.impose(condition_id, imposed_spec, node=node, name=name)
|
||||
|
||||
return condition_id
|
||||
|
||||
@@ -1137,12 +1151,47 @@ def virtual_preferences(self, pkg_name, func):
|
||||
|
||||
def provider_defaults(self):
|
||||
self.gen.h2("Default virtual providers")
|
||||
assert self.possible_virtuals is not None
|
||||
msg = (
|
||||
"Internal Error: possible_virtuals is not populated. Please report to the spack"
|
||||
" maintainers"
|
||||
)
|
||||
assert self.possible_virtuals is not None, msg
|
||||
self.virtual_preferences(
|
||||
"all",
|
||||
lambda v, p, i: self.gen.fact(fn.default_provider_preference(v, p, i)),
|
||||
)
|
||||
|
||||
def provider_requirements(self):
|
||||
self.gen.h2("Requirements on virtual providers")
|
||||
msg = (
|
||||
"Internal Error: possible_virtuals is not populated. Please report to the spack"
|
||||
" maintainers"
|
||||
)
|
||||
packages_yaml = spack.config.config.get("packages")
|
||||
assert self.possible_virtuals is not None, msg
|
||||
for virtual_str in sorted(self.possible_virtuals):
|
||||
requirements = packages_yaml.get(virtual_str, {}).get("require", [])
|
||||
rules = self._rules_from_requirements(virtual_str, requirements)
|
||||
self.emit_facts_from_requirement_rules(rules, virtual=True)
|
||||
|
||||
def emit_facts_from_requirement_rules(self, rules, virtual=False):
|
||||
"""Generate facts to enforce requirements from packages.yaml."""
|
||||
for requirement_grp_id, (pkg_name, policy, requirement_grp) in enumerate(rules):
|
||||
self.gen.fact(fn.requirement_group(pkg_name, requirement_grp_id))
|
||||
self.gen.fact(fn.requirement_policy(pkg_name, requirement_grp_id, policy))
|
||||
for requirement_weight, spec_str in enumerate(requirement_grp):
|
||||
spec = spack.spec.Spec(spec_str)
|
||||
if not spec.name:
|
||||
spec.name = pkg_name
|
||||
when_spec = spec
|
||||
if virtual:
|
||||
when_spec = spack.spec.Spec(pkg_name)
|
||||
member_id = self.condition(
|
||||
required_spec=when_spec, imposed_spec=spec, name=pkg_name, node=virtual
|
||||
)
|
||||
self.gen.fact(fn.requirement_group_member(member_id, pkg_name, requirement_grp_id))
|
||||
self.gen.fact(fn.requirement_has_weight(member_id, requirement_weight))
|
||||
|
||||
def external_packages(self):
|
||||
"""Facts on external packages, as read from packages.yaml"""
|
||||
# Read packages.yaml and normalize it, so that it
|
||||
@@ -1162,10 +1211,11 @@ def external_packages(self):
|
||||
|
||||
self.gen.h2("External package: {0}".format(pkg_name))
|
||||
# Check if the external package is buildable. If it is
|
||||
# not then "external(<pkg>)" is a fact.
|
||||
# not then "external(<pkg>)" is a fact, unless we can
|
||||
# reuse an already installed spec.
|
||||
external_buildable = data.get("buildable", True)
|
||||
if not external_buildable:
|
||||
self.gen.fact(fn.external_only(pkg_name))
|
||||
self.gen.fact(fn.buildable_false(pkg_name))
|
||||
|
||||
# Read a list of all the specs for this package
|
||||
externals = data.get("externals", [])
|
||||
@@ -1396,6 +1446,9 @@ class Body(object):
|
||||
|
||||
# dependencies
|
||||
if spec.concrete:
|
||||
# older specs do not have package hashes, so we have to do this carefully
|
||||
if getattr(spec, "_package_hash", None):
|
||||
clauses.append(fn.package_hash(spec.name, spec._package_hash))
|
||||
clauses.append(fn.hash(spec.name, spec.dag_hash()))
|
||||
|
||||
# add all clauses from dependencies
|
||||
@@ -1467,8 +1520,10 @@ def key_fn(item):
|
||||
# specs will be computed later
|
||||
version_preferences = packages_yaml.get(pkg_name, {}).get("version", [])
|
||||
for idx, v in enumerate(version_preferences):
|
||||
# v can be a string so force it into an actual version for comparisons
|
||||
ver = spack.version.Version(v)
|
||||
self.declared_versions[pkg_name].append(
|
||||
DeclaredVersion(version=v, idx=idx, origin=version_provenance.packages_yaml)
|
||||
DeclaredVersion(version=ver, idx=idx, origin=version_provenance.packages_yaml)
|
||||
)
|
||||
|
||||
for spec in specs:
|
||||
@@ -1656,7 +1711,11 @@ def target_defaults(self, specs):
|
||||
|
||||
def virtual_providers(self):
|
||||
self.gen.h2("Virtual providers")
|
||||
assert self.possible_virtuals is not None
|
||||
msg = (
|
||||
"Internal Error: possible_virtuals is not populated. Please report to the spack"
|
||||
" maintainers"
|
||||
)
|
||||
assert self.possible_virtuals is not None, msg
|
||||
|
||||
# what provides what
|
||||
for vspec in sorted(self.possible_virtuals):
|
||||
@@ -1908,6 +1967,7 @@ def setup(self, driver, specs, reuse=None):
|
||||
|
||||
self.virtual_providers()
|
||||
self.provider_defaults()
|
||||
self.provider_requirements()
|
||||
self.external_packages()
|
||||
self.flag_defaults()
|
||||
|
||||
@@ -2066,7 +2126,7 @@ def depends_on(self, pkg, dep, type):
|
||||
dependencies = self._specs[pkg].edges_to_dependencies(name=dep)
|
||||
|
||||
# TODO: assertion to be removed when cross-compilation is handled correctly
|
||||
msg = "Current solver does not handle multiple dependency edges " "of the same name"
|
||||
msg = "Current solver does not handle multiple dependency edges of the same name"
|
||||
assert len(dependencies) < 2, msg
|
||||
|
||||
if not dependencies:
|
||||
@@ -2156,7 +2216,11 @@ def build_specs(self, function_tuples):
|
||||
tty.debug(msg)
|
||||
continue
|
||||
|
||||
assert action and callable(action)
|
||||
msg = (
|
||||
"Internal Error: Uncallable action found in asp.py. Please report to the spack"
|
||||
" maintainers."
|
||||
)
|
||||
assert action and callable(action), msg
|
||||
|
||||
# ignore predicates on virtual packages, as they're used for
|
||||
# solving but don't construct anything. Do not ignore error
|
||||
@@ -2223,10 +2287,17 @@ def _develop_specs_from_env(spec, env):
|
||||
if not dev_info:
|
||||
return
|
||||
|
||||
path = os.path.normpath(os.path.join(env.path, dev_info["path"]))
|
||||
path = spack.util.path.canonicalize_path(dev_info["path"], default_wd=env.path)
|
||||
|
||||
if "dev_path" in spec.variants:
|
||||
assert spec.variants["dev_path"].value == path
|
||||
error_msg = (
|
||||
"Internal Error: The dev_path for spec {name} is not connected to a valid environment"
|
||||
"path. Please note that develop specs can only be used inside an environment"
|
||||
"These paths should be the same:\n\tdev_path:{dev_path}\n\tenv_based_path:{env_path}"
|
||||
)
|
||||
error_msg.format(name=spec.name, dev_path=spec.variants["dev_path"], env_path=path)
|
||||
|
||||
assert spec.variants["dev_path"].value == path, error_msg
|
||||
else:
|
||||
spec.variants.setdefault("dev_path", spack.variant.SingleValuedVariant("dev_path", path))
|
||||
spec.constrain(dev_info["spec"])
|
||||
|
||||
@@ -276,7 +276,8 @@ error(0, Msg) :- node(Package),
|
||||
conflict(Package, TriggerID, ConstraintID, Msg),
|
||||
condition_holds(TriggerID),
|
||||
condition_holds(ConstraintID),
|
||||
not external(Package). % ignore conflicts for externals
|
||||
not external(Package), % ignore conflicts for externals
|
||||
not hash(Package, _). % ignore conflicts for installed packages
|
||||
|
||||
#defined conflict/4.
|
||||
|
||||
@@ -436,7 +437,7 @@ attr("node_compiler_version_satisfies", Package, Compiler, Version)
|
||||
#defined external/1.
|
||||
#defined external_spec/2.
|
||||
#defined external_version_declared/4.
|
||||
#defined external_only/1.
|
||||
#defined buildable_false/1.
|
||||
#defined pkg_provider_preference/4.
|
||||
#defined default_provider_preference/3.
|
||||
#defined node_version_satisfies/2.
|
||||
@@ -463,8 +464,10 @@ error(2, "Attempted to use external for '{0}' which does not satisfy any configu
|
||||
version_weight(Package, Weight) :- external_version(Package, Version, Weight).
|
||||
version(Package, Version) :- external_version(Package, Version, Weight).
|
||||
|
||||
% if a package is not buildable (external_only), only externals are allowed
|
||||
external(Package) :- external_only(Package), node(Package).
|
||||
% if a package is not buildable, only externals or hashed specs are allowed
|
||||
external(Package) :- buildable_false(Package),
|
||||
node(Package),
|
||||
not hash(Package, _).
|
||||
|
||||
% a package is a real_node if it is not external
|
||||
real_node(Package) :- node(Package), not external(Package).
|
||||
@@ -483,7 +486,8 @@ external(Package) :- external_spec_selected(Package, _).
|
||||
% determine if an external spec has been selected
|
||||
external_spec_selected(Package, LocalIndex) :-
|
||||
external_conditions_hold(Package, LocalIndex),
|
||||
node(Package).
|
||||
node(Package),
|
||||
not hash(Package, _).
|
||||
|
||||
external_conditions_hold(Package, LocalIndex) :-
|
||||
possible_external(ID, Package, LocalIndex), condition_holds(ID).
|
||||
@@ -504,9 +508,12 @@ error(2, "Attempted to use external for '{0}' which does not satisfy any configu
|
||||
% Config required semantics
|
||||
%-----------------------------------------------------------------------------
|
||||
|
||||
activate_requirement_rules(Package) :- node(Package).
|
||||
activate_requirement_rules(Package) :- virtual_node(Package).
|
||||
|
||||
requirement_group_satisfied(Package, X) :-
|
||||
1 { condition_holds(Y) : requirement_group_member(Y, Package, X) } 1,
|
||||
node(Package),
|
||||
activate_requirement_rules(Package),
|
||||
requirement_policy(Package, X, "one_of"),
|
||||
requirement_group(Package, X).
|
||||
|
||||
@@ -519,7 +526,7 @@ requirement_weight(Package, W) :-
|
||||
|
||||
requirement_group_satisfied(Package, X) :-
|
||||
1 { condition_holds(Y) : requirement_group_member(Y, Package, X) } ,
|
||||
node(Package),
|
||||
activate_requirement_rules(Package),
|
||||
requirement_policy(Package, X, "any_of"),
|
||||
requirement_group(Package, X).
|
||||
|
||||
@@ -535,7 +542,7 @@ requirement_weight(Package, W) :-
|
||||
requirement_group_satisfied(Package, X).
|
||||
|
||||
error(2, "Cannot satisfy requirement group for package '{0}'", Package) :-
|
||||
node(Package),
|
||||
activate_requirement_rules(Package),
|
||||
requirement_group(Package, X),
|
||||
not requirement_group_satisfied(Package, X).
|
||||
|
||||
@@ -1059,6 +1066,11 @@ no_flags(Package, FlagType)
|
||||
% you can't choose an installed hash for a dev spec
|
||||
:- hash(Package, Hash), variant_value(Package, "dev_path", _).
|
||||
|
||||
% You can't install a hash, if it is not installed
|
||||
:- hash(Package, Hash), not installed_hash(Package, Hash).
|
||||
% This should be redundant given the constraint above
|
||||
:- hash(Package, Hash1), hash(Package, Hash2), Hash1 != Hash2.
|
||||
|
||||
% if a hash is selected, we impose all the constraints that implies
|
||||
impose(Hash) :- hash(Package, Hash).
|
||||
|
||||
|
||||
@@ -20,3 +20,6 @@ os_compatible("ubuntu20.04", "ubuntu19.10").
|
||||
os_compatible("ubuntu19.10", "ubuntu19.04").
|
||||
os_compatible("ubuntu19.04", "ubuntu18.10").
|
||||
os_compatible("ubuntu18.10", "ubuntu18.04").
|
||||
|
||||
%EL8
|
||||
os_compatible("rhel8", "rocky8").
|
||||
|
||||
@@ -284,6 +284,22 @@ def _string_or_none(s):
|
||||
|
||||
self.platform, self.os, self.target = platform_tuple
|
||||
|
||||
@staticmethod
|
||||
def override(init_spec, change_spec):
|
||||
if init_spec:
|
||||
new_spec = init_spec.copy()
|
||||
else:
|
||||
new_spec = ArchSpec()
|
||||
if change_spec.platform:
|
||||
new_spec.platform = change_spec.platform
|
||||
# TODO: if the platform is changed to something that is incompatible
|
||||
# with the current os, we should implicitly remove it
|
||||
if change_spec.os:
|
||||
new_spec.os = change_spec.os
|
||||
if change_spec.target:
|
||||
new_spec.target = change_spec.target
|
||||
return new_spec
|
||||
|
||||
def _autospec(self, spec_like):
|
||||
if isinstance(spec_like, ArchSpec):
|
||||
return spec_like
|
||||
@@ -1532,16 +1548,7 @@ def package_class(self):
|
||||
|
||||
@property
|
||||
def virtual(self):
|
||||
"""Right now, a spec is virtual if no package exists with its name.
|
||||
|
||||
TODO: revisit this -- might need to use a separate namespace and
|
||||
be more explicit about this.
|
||||
Possible idea: just use convention and make virtual deps all
|
||||
caps, e.g., MPI vs mpi.
|
||||
"""
|
||||
# This method can be called while regenerating the provider index
|
||||
# So we turn off using the index to detect virtuals
|
||||
return spack.repo.path.is_virtual(self.name, use_index=False)
|
||||
return spack.repo.path.is_virtual(self.name)
|
||||
|
||||
@property
|
||||
def concrete(self):
|
||||
@@ -2242,6 +2249,33 @@ def read_yaml_dep_specs(deps, hash_type=ht.dag_hash.name):
|
||||
raise spack.error.SpecError("Couldn't parse dependency types in spec.")
|
||||
yield dep_name, dep_hash, list(deptypes), hash_type
|
||||
|
||||
@staticmethod
|
||||
def override(init_spec, change_spec):
|
||||
# TODO: this doesn't account for the case where the changed spec
|
||||
# (and the user spec) have dependencies
|
||||
new_spec = init_spec.copy()
|
||||
package_cls = spack.repo.path.get_pkg_class(new_spec.name)
|
||||
if change_spec.versions and not change_spec.versions == spack.version.ver(":"):
|
||||
new_spec.versions = change_spec.versions
|
||||
for variant, value in change_spec.variants.items():
|
||||
if variant in package_cls.variants:
|
||||
if variant in new_spec.variants:
|
||||
new_spec.variants.substitute(value)
|
||||
else:
|
||||
new_spec.variants[variant] = value
|
||||
else:
|
||||
raise ValueError("{0} is not a variant of {1}".format(variant, new_spec.name))
|
||||
if change_spec.compiler:
|
||||
new_spec.compiler = change_spec.compiler
|
||||
if change_spec.compiler_flags:
|
||||
for flagname, flagvals in change_spec.compiler_flags.items():
|
||||
new_spec.compiler_flags[flagname] = flagvals
|
||||
if change_spec.architecture:
|
||||
new_spec.architecture = ArchSpec.override(
|
||||
new_spec.architecture, change_spec.architecture
|
||||
)
|
||||
return new_spec
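A hypothetical use of the new Spec.override(); the spec strings are illustrative and assume a normal Spack session where the package class can be looked up in the repo:

import spack.spec

init = spack.spec.Spec("hdf5@1.12.0+mpi")
change = spack.spec.Spec("hdf5@1.12.2~mpi")
new = spack.spec.Spec.override(init, change)
# new keeps init's remaining attributes but takes the version and variant
# values from change; a variant unknown to the package raises ValueError.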
|
||||
|
||||
@staticmethod
|
||||
def from_literal(spec_dict, normal=True):
|
||||
"""Builds a Spec from a dictionary containing the spec literal.
|
||||
@@ -2584,7 +2618,9 @@ def _expand_virtual_packages(self, concretizer):
|
||||
a problem.
|
||||
"""
|
||||
# Make an index of stuff this spec already provides
|
||||
self_index = spack.provider_index.ProviderIndex(self.traverse(), restrict=True)
|
||||
self_index = spack.provider_index.ProviderIndex(
|
||||
repository=spack.repo.path, specs=self.traverse(), restrict=True
|
||||
)
|
||||
changed = False
|
||||
done = False
|
||||
|
||||
@@ -3108,7 +3144,7 @@ def _find_provider(self, vdep, provider_index):
|
||||
Raise an exception if there is a conflicting virtual
|
||||
dependency already in this spec.
|
||||
"""
|
||||
assert vdep.virtual
|
||||
assert spack.repo.path.is_virtual_safe(vdep.name), vdep
|
||||
|
||||
# note that this defensively copies.
|
||||
providers = provider_index.providers_for(vdep)
|
||||
@@ -3173,16 +3209,18 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
|
||||
|
||||
# If it's a virtual dependency, try to find an existing
|
||||
# provider in the spec, and merge that.
|
||||
if dep.virtual:
|
||||
if spack.repo.path.is_virtual_safe(dep.name):
|
||||
visited.add(dep.name)
|
||||
provider = self._find_provider(dep, provider_index)
|
||||
if provider:
|
||||
dep = provider
|
||||
else:
|
||||
index = spack.provider_index.ProviderIndex([dep], restrict=True)
|
||||
index = spack.provider_index.ProviderIndex(
|
||||
repository=spack.repo.path, specs=[dep], restrict=True
|
||||
)
|
||||
items = list(spec_deps.items())
|
||||
for name, vspec in items:
|
||||
if not vspec.virtual:
|
||||
if not spack.repo.path.is_virtual_safe(vspec.name):
|
||||
continue
|
||||
|
||||
if index.providers_for(vspec):
|
||||
@@ -3332,7 +3370,7 @@ def normalize(self, force=False, tests=False, user_spec_deps=None):
|
||||
# Initialize index of virtual dependency providers if
|
||||
# concretize didn't pass us one already
|
||||
provider_index = spack.provider_index.ProviderIndex(
|
||||
[s for s in all_spec_deps.values()], restrict=True
|
||||
repository=spack.repo.path, specs=[s for s in all_spec_deps.values()], restrict=True
|
||||
)
|
||||
|
||||
# traverse the package DAG and fill out dependencies according
|
||||
@@ -3710,8 +3748,12 @@ def satisfies_dependencies(self, other, strict=False):
|
||||
return False
|
||||
|
||||
# For virtual dependencies, we need to dig a little deeper.
|
||||
self_index = spack.provider_index.ProviderIndex(self.traverse(), restrict=True)
|
||||
other_index = spack.provider_index.ProviderIndex(other.traverse(), restrict=True)
|
||||
self_index = spack.provider_index.ProviderIndex(
|
||||
repository=spack.repo.path, specs=self.traverse(), restrict=True
|
||||
)
|
||||
other_index = spack.provider_index.ProviderIndex(
|
||||
repository=spack.repo.path, specs=other.traverse(), restrict=True
|
||||
)
|
||||
|
||||
# This handles cases where there are already providers for both vpkgs
|
||||
if not self_index.satisfies(other_index):
|
||||
@@ -4013,6 +4055,9 @@ def _cmp_node(self):
|
||||
yield self.compiler_flags
|
||||
yield self.architecture
|
||||
|
||||
# this is not present on older specs
|
||||
yield getattr(self, "_package_hash", None)
|
||||
|
||||
def eq_node(self, other):
|
||||
"""Equality with another spec, not including dependencies."""
|
||||
return (other is not None) and lang.lazy_eq(self._cmp_node, other._cmp_node)
|
||||
@@ -4022,6 +4067,16 @@ def _cmp_iter(self):
|
||||
for item in self._cmp_node():
|
||||
yield item
|
||||
|
||||
# This needs to be in _cmp_iter so that no specs with different process hashes
|
||||
# are considered the same by `__hash__` or `__eq__`.
|
||||
#
|
||||
# TODO: We should eventually unify the `_cmp_*` methods with `to_node_dict` so
|
||||
# TODO: there aren't two sources of truth, but this needs some thought, since
|
||||
# TODO: they exist for speed. We should benchmark whether it's really worth
|
||||
# TODO: having two types of hashing now that we use `json` instead of `yaml` for
|
||||
# TODO: spec hashing.
|
||||
yield self.process_hash() if self.concrete else None
|
||||
|
||||
def deps():
|
||||
for dep in sorted(itertools.chain.from_iterable(self._dependencies.values())):
|
||||
yield dep.spec.name
|
||||
@@ -4938,7 +4993,7 @@ def __missing__(self, key):
|
||||
|
||||
|
||||
#: These are possible token types in the spec grammar.
|
||||
HASH, DEP, AT, COLON, COMMA, ON, OFF, PCT, EQ, ID, VAL, FILE = range(12)
|
||||
HASH, DEP, VER, COLON, COMMA, ON, OFF, PCT, EQ, ID, VAL, FILE = range(12)
|
||||
|
||||
#: Regex for fully qualified spec names. (e.g., builtin.hdf5)
|
||||
spec_id_re = r"\w[\w.-]*"
|
||||
@@ -4958,10 +5013,13 @@ def __init__(self):
|
||||
)
|
||||
super(SpecLexer, self).__init__(
|
||||
[
|
||||
(r"\^", lambda scanner, val: self.token(DEP, val)),
|
||||
(r"\@", lambda scanner, val: self.token(AT, val)),
|
||||
(
|
||||
r"\@([\w.\-]*\s*)*(\s*\=\s*\w[\w.\-]*)?",
|
||||
lambda scanner, val: self.token(VER, val),
|
||||
),
|
||||
(r"\:", lambda scanner, val: self.token(COLON, val)),
|
||||
(r"\,", lambda scanner, val: self.token(COMMA, val)),
|
||||
(r"\^", lambda scanner, val: self.token(DEP, val)),
|
||||
(r"\+", lambda scanner, val: self.token(ON, val)),
|
||||
(r"\-", lambda scanner, val: self.token(OFF, val)),
|
||||
(r"\~", lambda scanner, val: self.token(OFF, val)),
|
||||
@@ -5099,7 +5157,7 @@ def do_parse(self):
|
||||
else:
|
||||
# If the next token can be part of a valid anonymous spec,
|
||||
# create the anonymous spec
|
||||
if self.next.type in (AT, ON, OFF, PCT):
|
||||
if self.next.type in (VER, ON, OFF, PCT):
|
||||
# Raise an error if the previous spec is already concrete
|
||||
if specs and specs[-1].concrete:
|
||||
raise RedundantSpecError(specs[-1], "compiler, version, " "or variant")
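Illustrative matches for the new VER token regex introduced above; the spec fragments are made up:

import re

ver_re = re.compile(r"\@([\w.\-]*\s*)*(\s*\=\s*\w[\w.\-]*)?")
assert ver_re.match("@1.2.3")
assert ver_re.match("@git.main=develop")      # a reference pinned to a version
assert ver_re.match("@abc123def456=1.2.3")    # a commit-like hash pinned to a version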
|
||||
@@ -5207,7 +5265,7 @@ def spec(self, name):
|
||||
spec.name = spec_name
|
||||
|
||||
while self.next:
|
||||
if self.accept(AT):
|
||||
if self.accept(VER):
|
||||
vlist = self.version_list()
|
||||
spec._add_versions(vlist)
|
||||
|
||||
@@ -5225,7 +5283,6 @@ def spec(self, name):
|
||||
elif self.accept(ID):
|
||||
self.previous = self.token
|
||||
if self.accept(EQ):
|
||||
# We're adding a key-value pair to the spec
|
||||
self.expect(VAL)
|
||||
spec._add_flag(self.previous.value, self.token.value)
|
||||
self.previous = None
|
||||
@@ -5261,16 +5318,24 @@ def variant(self, name=None):
|
||||
return self.token.value
|
||||
|
||||
def version(self):
|
||||
|
||||
start = None
|
||||
end = None
|
||||
if self.accept(ID):
|
||||
start = self.token.value
|
||||
if self.accept(EQ):
|
||||
# This is for versions that are associated with a hash
|
||||
# i.e. @[40 char hash]=version
|
||||
start += self.token.value
|
||||
self.expect(VAL)
|
||||
start += self.token.value
|
||||
|
||||
def str_translate(value):
|
||||
# return None for empty strings since we can end up with `'@'.strip('@')`
|
||||
if not (value and value.strip()):
|
||||
return None
|
||||
else:
|
||||
return value
|
||||
|
||||
if self.token.type is COMMA:
|
||||
# need to increment commas, could be ID or COLON
|
||||
self.accept(ID)
|
||||
|
||||
if self.token.type in (VER, ID):
|
||||
version_spec = self.token.value.lstrip("@")
|
||||
start = str_translate(version_spec)
|
||||
|
||||
if self.accept(COLON):
|
||||
if self.accept(ID):
|
||||
@@ -5280,10 +5345,10 @@ def version(self):
|
||||
else:
|
||||
end = self.token.value
|
||||
elif start:
|
||||
# No colon, but there was a version.
|
||||
# No colon, but there was a version
|
||||
return vn.Version(start)
|
||||
else:
|
||||
# No colon and no id: invalid version.
|
||||
# No colon and no id: invalid version
|
||||
self.next_token_error("Invalid version specifier")
|
||||
|
||||
if start:
|
||||
@@ -5306,7 +5371,7 @@ def compiler(self):
|
||||
compiler = CompilerSpec.__new__(CompilerSpec)
|
||||
compiler.name = self.token.value
|
||||
compiler.versions = vn.VersionList()
|
||||
if self.accept(AT):
|
||||
if self.accept(VER):
|
||||
vlist = self.version_list()
|
||||
compiler._add_versions(vlist)
|
||||
else:
|
||||
|
||||
@@ -34,6 +34,13 @@ def __init__(self, name="specs", yaml_list=None, reference=None):
|
||||
self._constraints = None
|
||||
self._specs = None
|
||||
|
||||
@property
|
||||
def is_matrix(self):
|
||||
for item in self.specs_as_yaml_list:
|
||||
if isinstance(item, dict):
|
||||
return True
|
||||
return False
|
||||
|
||||
@property
|
||||
def specs_as_yaml_list(self):
|
||||
if self._expanded_list is None:
|
||||
|
||||
@@ -42,6 +42,7 @@
|
||||
import spack.util.pattern as pattern
|
||||
import spack.util.url as url_util
|
||||
from spack.util.crypto import bit_length, prefix_bits
|
||||
from spack.util.web import FetchError
|
||||
|
||||
# The well-known stage source subdirectory name.
|
||||
_source_path_subdir = "spack-src"
|
||||
@@ -529,7 +530,7 @@ def print_errors(errors):
|
||||
|
||||
self.fetcher = self.default_fetcher
|
||||
default_msg = "All fetchers failed for {0}".format(self.name)
|
||||
raise fs.FetchError(err_msg or default_msg, None)
|
||||
raise FetchError(err_msg or default_msg, None)
|
||||
|
||||
print_errors(errors)
|
||||
|
||||
|
||||
@@ -102,8 +102,8 @@ def __init__(self):
|
||||
|
||||
def restore(self):
|
||||
if _serialize:
|
||||
spack.repo.path = spack.repo._path(self.repo_dirs)
|
||||
spack.config.config = self.config
|
||||
spack.repo.path = spack.repo._path(self.config)
|
||||
spack.platforms.host = self.platform
|
||||
|
||||
new_store = spack.store.Store.deserialize(self.store_token)
|
||||
|
||||
@@ -50,8 +50,9 @@ def packages_with_tags(tags, installed, skip_empty):
|
||||
class TagIndex(Mapping):
|
||||
"""Maps tags to list of packages."""
|
||||
|
||||
def __init__(self):
|
||||
def __init__(self, repository):
|
||||
self._tag_dict = collections.defaultdict(list)
|
||||
self.repository = repository
|
||||
|
||||
@property
|
||||
def tags(self):
|
||||
@@ -61,7 +62,7 @@ def to_json(self, stream):
|
||||
sjson.dump({"tags": self._tag_dict}, stream)
|
||||
|
||||
@staticmethod
|
||||
def from_json(stream):
|
||||
def from_json(stream, repository):
|
||||
d = sjson.load(stream)
|
||||
|
||||
if not isinstance(d, dict):
|
||||
@@ -70,7 +71,7 @@ def from_json(stream):
|
||||
if "tags" not in d:
|
||||
raise TagIndexError("TagIndex data does not start with 'tags'")
|
||||
|
||||
r = TagIndex()
|
||||
r = TagIndex(repository=repository)
|
||||
|
||||
for tag, packages in d["tags"].items():
|
||||
r[tag].extend(packages)
|
||||
@@ -88,7 +89,7 @@ def __len__(self):
|
||||
|
||||
def copy(self):
|
||||
"""Return a deep copy of this index."""
|
||||
clone = TagIndex()
|
||||
clone = TagIndex(repository=self.repository)
|
||||
clone._tag_dict = copy.deepcopy(self._tag_dict)
|
||||
return clone
|
||||
|
||||
@@ -117,9 +118,8 @@ def update_package(self, pkg_name):
|
||||
|
||||
Args:
|
||||
pkg_name (str): name of the package to be removed from the index
|
||||
|
||||
"""
|
||||
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
|
||||
pkg_cls = self.repository.get_pkg_class(pkg_name)
|
||||
|
||||
# Remove the package from the list of packages, if present
|
||||
for pkg_list in self._tag_dict.values():
|
||||
|
||||
@@ -11,6 +11,7 @@
|
||||
import llnl.util.lang
|
||||
|
||||
import spack.config
|
||||
import spack.extensions
|
||||
from spack.util.path import canonicalize_path
|
||||
|
||||
|
||||
|
||||
@@ -9,18 +9,20 @@
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"packages,expected_error",
|
||||
# PKG-PROPERTIES are ubiquitous in mock packages, since they don't use sha256
|
||||
# and they don't change the example.com URL very often.
|
||||
"packages,expected_errors",
|
||||
[
|
||||
# A non existing variant is used in a conflict directive
|
||||
(["wrong-variant-in-conflicts"], "PKG-DIRECTIVES"),
|
||||
(["wrong-variant-in-conflicts"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
|
||||
# The package declares a non-existing dependency
|
||||
(["missing-dependency"], "PKG-DIRECTIVES"),
|
||||
(["missing-dependency"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
|
||||
# The package use a non existing variant in a depends_on directive
|
||||
(["wrong-variant-in-depends-on"], "PKG-DIRECTIVES"),
|
||||
(["wrong-variant-in-depends-on"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
|
||||
# This package has a GitHub patch URL without full_index=1
|
||||
(["invalid-github-patch-url"], "PKG-DIRECTIVES"),
|
||||
(["invalid-github-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
|
||||
# This package has a stand-alone 'test' method in build-time callbacks
|
||||
(["test-build-callbacks"], "PKG-DIRECTIVES"),
|
||||
(["test-build-callbacks"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
|
||||
# This package has no issues
|
||||
(["mpileaks"], None),
|
||||
# This package has a conflict with a trigger which cannot constrain the constraint
|
||||
@@ -28,15 +30,16 @@
|
||||
(["unconstrainable-conflict"], None),
|
||||
],
|
||||
)
|
||||
def test_package_audits(packages, expected_error, mock_packages):
|
||||
def test_package_audits(packages, expected_errors, mock_packages):
|
||||
reports = spack.audit.run_group("packages", pkgs=packages)
|
||||
|
||||
# Check that errors were reported only for the expected failure
|
||||
actual_errors = [check for check, errors in reports if errors]
|
||||
if expected_error:
|
||||
assert [expected_error] == actual_errors
|
||||
msg = [str(e) for _, errors in reports for e in errors]
|
||||
if expected_errors:
|
||||
assert expected_errors == actual_errors, msg
|
||||
else:
|
||||
assert not actual_errors
|
||||
assert not actual_errors, msg
|
||||
|
||||
|
||||
# Data used in the test below to audit the double definition of a compiler
|
||||
|
||||
@@ -97,12 +97,12 @@ def config_directory(tmpdir_factory):
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def default_config(tmpdir_factory, config_directory, monkeypatch, install_mockery_mutable_config):
|
||||
def default_config(tmpdir, config_directory, monkeypatch, install_mockery_mutable_config):
|
||||
# This fixture depends on install_mockery_mutable_config to ensure
|
||||
# there is a clear order of initialization. The substitution of the
|
||||
# config scopes here is done on top of the substitution that comes with
|
||||
# install_mockery_mutable_config
|
||||
mutable_dir = tmpdir_factory.mktemp("mutable_config").join("tmp")
|
||||
mutable_dir = tmpdir.mkdir("mutable_config").join("tmp")
|
||||
config_directory.copy(mutable_dir)
|
||||
|
||||
cfg = spack.config.Configuration(
|
||||
@@ -113,7 +113,7 @@ def default_config(tmpdir_factory, config_directory, monkeypatch, install_mocker
|
||||
)
|
||||
|
||||
spack.config.config, old_config = cfg, spack.config.config
|
||||
|
||||
spack.config.config.set("repos", [spack.paths.mock_packages_path])
|
||||
# This is essential, otherwise the cache will create weird side effects
|
||||
# that will compromise subsequent tests if compilers.yaml is modified
|
||||
monkeypatch.setattr(spack.compilers, "_cache_config_file", [])
|
||||
|
||||
@@ -134,7 +134,7 @@ def test_config_yaml_is_preserved_during_bootstrap(mutable_config):
|
||||
|
||||
|
||||
@pytest.mark.regression("26548")
|
||||
def test_custom_store_in_environment(mutable_config, tmpdir):
|
||||
def test_bootstrap_custom_store_in_environment(mutable_config, tmpdir):
|
||||
# Test that the custom store in an environment is taken into account
|
||||
# during bootstrapping
|
||||
spack_yaml = tmpdir.join("spack.yaml")
|
||||
|
||||
@@ -177,6 +177,14 @@ def _set_wrong_cc(x):
|
||||
assert os.environ["ANOTHER_VAR"] == "THIS_IS_SET"
|
||||
|
||||
|
||||
def test_setup_dependent_package_inherited_modules(
|
||||
config, working_env, mock_packages, install_mockery, mock_fetch
|
||||
):
|
||||
# This will raise on regression
|
||||
s = spack.spec.Spec("cmake-client-inheritor").concretized()
|
||||
s.package.do_install()
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"initial,modifications,expected",
|
||||
[
|
||||
|
||||
@@ -62,3 +62,36 @@ def test_build_request_strings(install_mockery):
|
||||
istr = str(request)
|
||||
assert "package=dependent-install" in istr
|
||||
assert "install_args=" in istr
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"package_cache_only,dependencies_cache_only,package_deptypes,dependencies_deptypes",
|
||||
[
|
||||
(False, False, ["build", "link", "run"], ["build", "link", "run"]),
|
||||
(True, False, ["link", "run"], ["build", "link", "run"]),
|
||||
(False, True, ["build", "link", "run"], ["link", "run"]),
|
||||
(True, True, ["link", "run"], ["link", "run"]),
|
||||
],
|
||||
)
|
||||
def test_build_request_deptypes(
|
||||
install_mockery,
|
||||
package_cache_only,
|
||||
dependencies_cache_only,
|
||||
package_deptypes,
|
||||
dependencies_deptypes,
|
||||
):
|
||||
s = spack.spec.Spec("dependent-install").concretized()
|
||||
|
||||
build_request = inst.BuildRequest(
|
||||
s.package,
|
||||
{
|
||||
"package_cache_only": package_cache_only,
|
||||
"dependencies_cache_only": dependencies_cache_only,
|
||||
},
|
||||
)
|
||||
|
||||
actual_package_deptypes = build_request.get_deptypes(s.package)
|
||||
actual_dependency_deptypes = build_request.get_deptypes(s["dependency-install"].package)
|
||||
|
||||
assert sorted(actual_package_deptypes) == package_deptypes
|
||||
assert sorted(actual_dependency_deptypes) == dependencies_deptypes
|
||||
|
||||
@@ -176,6 +176,33 @@ def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
|
||||
ci.download_and_extract_artifacts(url, working_dir)
|
||||
|
||||
|
||||
def test_ci_copy_stage_logs_to_artifacts_fail(tmpdir, config, mock_packages, monkeypatch, capfd):
|
||||
"""The copy will fail because the spec is not concrete so does not have
|
||||
a package."""
|
||||
log_dir = tmpdir.join("log_dir")
|
||||
s = spec.Spec("printing-package").concretized()
|
||||
|
||||
ci.copy_stage_logs_to_artifacts(s, log_dir)
|
||||
_, err = capfd.readouterr()
|
||||
assert "Unable to copy files" in err
|
||||
assert "No such file or directory" in err
|
||||
|
||||
|
||||
def test_ci_copy_test_logs_to_artifacts_fail(tmpdir, capfd):
|
||||
log_dir = tmpdir.join("log_dir")
|
||||
|
||||
ci.copy_test_logs_to_artifacts("no-such-dir", log_dir)
|
||||
_, err = capfd.readouterr()
|
||||
assert "Cannot copy test logs" in err
|
||||
|
||||
stage_dir = tmpdir.join("stage_dir").strpath
|
||||
os.makedirs(stage_dir)
|
||||
ci.copy_test_logs_to_artifacts(stage_dir, log_dir)
|
||||
_, err = capfd.readouterr()
|
||||
assert "Unable to copy files" in err
|
||||
assert "No such file or directory" in err
|
||||
|
||||
|
||||
def test_setup_spack_repro_version(tmpdir, capfd, last_two_git_commits, monkeypatch):
|
||||
c1, c2 = last_two_git_commits
|
||||
repro_dir = os.path.join(tmpdir.strpath, "repro")
|
||||
@@ -457,6 +484,7 @@ def test_get_spec_filter_list(mutable_mock_env_path, config, mutable_mock_repo):
|
||||
assert affected_pkg_names == expected_affected_pkg_names
|
||||
|
||||
|
||||
@pytest.mark.maybeslow
|
||||
@pytest.mark.regression("29947")
|
||||
def test_affected_specs_on_first_concretization(mutable_mock_env_path, config):
|
||||
e = ev.create("first_concretization")
|
||||
@@ -467,3 +495,154 @@ def test_affected_specs_on_first_concretization(mutable_mock_env_path, config):
|
||||
affected_specs = spack.ci.get_spec_filter_list(e, ["zlib"])
|
||||
hdf5_specs = [s for s in affected_specs if s.name == "hdf5"]
|
||||
assert len(hdf5_specs) == 2
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
sys.platform == "win32", reason="Reliance on bash script ot supported on Windows"
|
||||
)
|
||||
def test_ci_process_command(tmpdir):
|
||||
repro_dir = tmpdir.join("repro_dir").strpath
|
||||
os.makedirs(repro_dir)
|
||||
result = ci.process_command("help", [], repro_dir)
|
||||
|
||||
assert os.path.exists(fs.join_path(repro_dir, "help.sh"))
|
||||
assert not result
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
sys.platform == "win32", reason="Reliance on bash script ot supported on Windows"
|
||||
)
|
||||
def test_ci_process_command_fail(tmpdir, monkeypatch):
|
||||
import subprocess
|
||||
|
||||
err = "subprocess wait exception"
|
||||
|
||||
def _fail(self, args):
|
||||
raise RuntimeError(err)
|
||||
|
||||
monkeypatch.setattr(subprocess.Popen, "__init__", _fail)
|
||||
|
||||
repro_dir = tmpdir.join("repro_dir").strpath
|
||||
os.makedirs(repro_dir)
|
||||
|
||||
with pytest.raises(RuntimeError, match=err):
|
||||
ci.process_command("help", [], repro_dir)
|
||||
|
||||
|
||||
def test_ci_create_buildcache(tmpdir, working_env, config, mock_packages, monkeypatch):
|
||||
# Monkeypatching ci method tested elsewhere to reduce number of methods
|
||||
# that would need to be patched here.
|
||||
monkeypatch.setattr(spack.ci, "push_mirror_contents", lambda a, b, c, d: None)
|
||||
|
||||
args = {
|
||||
"env": None,
|
||||
"buildcache_mirror_url": "file://fake-url",
|
||||
"pipeline_mirror_url": "file://fake-url",
|
||||
}
|
||||
ci.create_buildcache(**args)
|
||||
|
||||
|
||||
def test_ci_run_standalone_tests_missing_requirements(
|
||||
tmpdir, working_env, config, mock_packages, capfd
|
||||
):
|
||||
"""This test case checks for failing prerequisite checks."""
|
||||
ci.run_standalone_tests()
|
||||
err = capfd.readouterr()[1]
|
||||
assert "Job spec is required" in err
|
||||
|
||||
args = {"job_spec": spec.Spec("printing-package").concretized()}
|
||||
ci.run_standalone_tests(**args)
|
||||
err = capfd.readouterr()[1]
|
||||
assert "Reproduction directory is required" in err
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
sys.platform == "win32", reason="Reliance on bash script ot supported on Windows"
|
||||
)
|
||||
def test_ci_run_standalone_tests_not_installed_junit(
|
||||
tmpdir, working_env, config, mock_packages, mock_test_stage, capfd
|
||||
):
|
||||
log_file = tmpdir.join("junit.xml").strpath
|
||||
args = {
|
||||
"log_file": log_file,
|
||||
"job_spec": spec.Spec("printing-package").concretized(),
|
||||
"repro_dir": tmpdir.join("repro_dir").strpath,
|
||||
"fail_fast": True,
|
||||
}
|
||||
os.makedirs(args["repro_dir"])
|
||||
|
||||
ci.run_standalone_tests(**args)
|
||||
err = capfd.readouterr()[1]
|
||||
assert "No installed packages" in err
|
||||
assert os.path.getsize(log_file) > 0
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
sys.platform == "win32", reason="Reliance on bash script ot supported on Windows"
|
||||
)
|
||||
def test_ci_run_standalone_tests_not_installed_cdash(
|
||||
tmpdir, working_env, config, mock_packages, mock_test_stage, capfd
|
||||
):
|
||||
"""Test run_standalone_tests with cdash and related options."""
|
||||
log_file = tmpdir.join("junit.xml").strpath
|
||||
args = {
|
||||
"log_file": log_file,
|
||||
"job_spec": spec.Spec("printing-package").concretized(),
|
||||
"repro_dir": tmpdir.join("repro_dir").strpath,
|
||||
}
|
||||
os.makedirs(args["repro_dir"])
|
||||
|
||||
# Cover when CDash handler provided (with the log file as well)
|
||||
ci_cdash = {
|
||||
"url": "file://fake",
|
||||
"build-group": "fake-group",
|
||||
"project": "ci-unit-testing",
|
||||
"site": "fake-site",
|
||||
}
|
||||
os.environ["SPACK_CDASH_BUILD_NAME"] = "ci-test-build"
|
||||
os.environ["SPACK_CDASH_BUILD_STAMP"] = "ci-test-build-stamp"
|
||||
os.environ["CI_RUNNER_DESCRIPTION"] = "test-runner"
|
||||
handler = ci.CDashHandler(ci_cdash)
|
||||
args["cdash"] = handler
|
||||
ci.run_standalone_tests(**args)
|
||||
out = capfd.readouterr()[0]
|
||||
# CDash *and* log file output means log file ignored
|
||||
assert "xml option is ignored" in out
|
||||
assert "0 passed of 0" in out
|
||||
|
||||
# copy test results (though none)
|
||||
artifacts_dir = tmpdir.join("artifacts")
|
||||
fs.mkdirp(artifacts_dir.strpath)
|
||||
handler.copy_test_results(tmpdir.strpath, artifacts_dir.strpath)
|
||||
err = capfd.readouterr()[1]
|
||||
assert "Unable to copy files" in err
|
||||
assert "No such file or directory" in err
|
||||
|
||||
|
||||
def test_ci_skipped_report(tmpdir, mock_packages, config):
|
||||
"""Test explicit skipping of report as well as CI's 'package' arg."""
|
||||
pkg = "trivial-smoke-test"
|
||||
spec = spack.spec.Spec(pkg).concretized()
|
||||
ci_cdash = {
|
||||
"url": "file://fake",
|
||||
"build-group": "fake-group",
|
||||
"project": "ci-unit-testing",
|
||||
"site": "fake-site",
|
||||
}
|
||||
os.environ["SPACK_CDASH_BUILD_NAME"] = "fake-test-build"
|
||||
os.environ["SPACK_CDASH_BUILD_STAMP"] = "ci-test-build-stamp"
|
||||
os.environ["CI_RUNNER_DESCRIPTION"] = "test-runner"
|
||||
handler = ci.CDashHandler(ci_cdash)
|
||||
reason = "Testing skip"
|
||||
handler.report_skipped(spec, tmpdir.strpath, reason=reason)
|
||||
|
||||
report = fs.join_path(tmpdir, "{0}_Testing.xml".format(pkg))
|
||||
expected = "Skipped {0} package".format(pkg)
|
||||
with open(report, "r") as f:
|
||||
have = [0, 0]
|
||||
for line in f:
|
||||
if expected in line:
|
||||
have[0] += 1
|
||||
elif reason in line:
|
||||
have[1] += 1
|
||||
assert all(count == 1 for count in have)
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.