Compare commits: test/envir ... revert-363 (518 commits)
.github/workflows/audit.yaml (vendored, 2 changes)

@@ -19,7 +19,7 @@ jobs:
   package-audits:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
       - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
         with:
          python-version: ${{inputs.python_version}}
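Every workflow change in this comparison follows the same pattern: a third-party action is pinned to a full, immutable commit SHA rather than a mutable tag, and the pin is bumped to a newer commit. A minimal sketch of the pattern (the workflow name and trigger below are illustrative, not taken from the repository):

```yaml
# Hypothetical workflow illustrating SHA-pinning of third-party actions.
name: example
on: [push]
jobs:
  demo:
    runs-on: ubuntu-latest
    steps:
      # Pin to a commit SHA; the trailing comment records the human-readable
      # tag the SHA corresponds to at the time of pinning.
      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
```

Pinning to a SHA ensures a retagged or compromised release cannot silently change the code a workflow runs.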
.github/workflows/bootstrap.yml (vendored, 22 changes)

@@ -24,7 +24,7 @@ jobs:
             make patch unzip which xz python3 python3-devel tree \
             cmake bison bison-devel libstdc++-static
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
             make patch unzip xz-utils python3 python3-dev tree \
             cmake bison
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
             bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
             make patch unzip xz-utils python3 python3-dev tree
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
             make patch unzip which xz python3 python3-devel tree \
             cmake bison
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
         with:
           fetch-depth: 0
       - name: Setup repo
@@ -158,7 +158,7 @@ jobs:
         run: |
           brew install cmake bison@2.7 tree
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
@@ -179,7 +179,7 @@ jobs:
         run: |
           brew install tree
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
       - name: Bootstrap clingo
         run: |
           set -ex
@@ -204,7 +204,7 @@ jobs:
     runs-on: ubuntu-20.04
     steps:
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
         with:
           fetch-depth: 0
       - name: Setup repo
@@ -247,7 +247,7 @@ jobs:
             bzip2 curl file g++ gcc patchelf gfortran git gzip \
             make patch unzip xz-utils python3 python3-dev tree
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
             make patch unzip xz-utils python3 python3-dev tree \
             gawk
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
.github/workflows/build-containers.yml (vendored, 4 changes)

@@ -50,7 +50,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2

       - name: Set Container Tag Normal (Nightly)
         run: |
@@ -89,7 +89,7 @@ jobs:
         uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@f03ac48505955848960e80bbb68046aa35c7b9e7 # @v1
+        uses: docker/setup-buildx-action@4b4e9c3e2d4531116a6f8ba8e71fc6e2cb6e6c8c # @v1

       - name: Log in to GitHub Container Registry
         uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
.github/workflows/ci.yaml (vendored, 2 changes)

@@ -35,7 +35,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0
.github/workflows/unit_tests.yaml (vendored, 10 changes)

@@ -47,7 +47,7 @@ jobs:
             on_develop: false

     steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
@@ -94,7 +94,7 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
@@ -133,7 +133,7 @@ jobs:
         dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
      - name: Setup repo and non-root user
        run: |
          git --version
@@ -151,7 +151,7 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
@@ -185,7 +185,7 @@ jobs:
       matrix:
         python-version: ["3.10"]
     steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
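The last hunk above shows the matrix pattern these unit-test jobs rely on: each entry in ``python-version`` yields one job instance, and the checkout uses ``fetch-depth: 0`` so the full git history is available. A minimal sketch of how the pieces fit together (the job name and version list are illustrative):

```yaml
# Hypothetical sketch of the matrix pattern used by unit_tests.yaml.
jobs:
  unit-tests:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["3.10"]  # one job is generated per entry
    steps:
      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
        with:
          fetch-depth: 0  # full history, needed by tools that inspect git metadata
      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
        with:
          python-version: ${{ matrix.python-version }}
```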
.github/workflows/valid-style.yml (vendored, 4 changes)

@@ -18,7 +18,7 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
       - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
         with:
           python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
.github/workflows/windows_python.yml (vendored, 8 changes)

@@ -15,7 +15,7 @@ jobs:
   unit-tests:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
        with:
          fetch-depth: 0
      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
@@ -39,7 +39,7 @@ jobs:
   unit-tests-cmd:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
        with:
          fetch-depth: 0
      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
@@ -63,7 +63,7 @@ jobs:
   build-abseil:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
        with:
          fetch-depth: 0
      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
@@ -87,7 +87,7 @@ jobs:
 #        git config --global core.symlinks false
 #      shell:
 #        powershell
-#      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+#      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
 #      with:
 #        fetch-depth: 0
 #      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
@@ -13,16 +13,18 @@ concretizer:
   # Whether to consider installed packages or packages from buildcaches when
   # concretizing specs. If `true`, we'll try to use as many installs/binaries
   # as possible, rather than building. If `false`, we'll always give you a fresh
-  # concretization.
-  reuse: true
+  # concretization. If `dependencies`, we'll only reuse dependencies but
+  # give you a fresh concretization for your root specs.
+  reuse: dependencies
   # Options that tune which targets are considered for concretization. The
   # concretization process is very sensitive to the number targets, and the time
   # needed to reach a solution increases noticeably with the number of targets
   # considered.
   targets:
-    # Determine whether we want to target specific or generic microarchitectures.
-    # An example of the first kind might be for instance "skylake" or "bulldozer",
-    # while generic microarchitectures are for instance "aarch64" or "x86_64_v4".
+    # Determine whether we want to target specific or generic
+    # microarchitectures. Valid values are: "microarchitectures" or "generic".
+    # An example of "microarchitectures" would be "skylake" or "bulldozer",
+    # while an example of "generic" would be "aarch64" or "x86_64_v4".
     granularity: microarchitectures
     # If "false" allow targets that are incompatible with the current host (for
     # instance concretize with target "icelake" while running on "haswell").
@@ -33,4 +35,4 @@ concretizer:
   # environments can always be activated. When "false" perform concretization separately
   # on each root spec, allowing different versions and variants of the same package in
   # an environment.
-  unify: true
+  unify: true
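The new ``reuse: dependencies`` default sits between the old ``true``/``false`` binary: roots are concretized fresh while dependencies may be reused. A user who prefers either of the old behaviors can override the default in their own scope; a minimal sketch, assuming the usual user-level configuration file:

```yaml
# ~/.spack/concretizer.yaml (hypothetical user override)
concretizer:
  reuse: true             # reuse roots and dependencies whenever possible
  targets:
    granularity: generic  # prefer portable targets such as "x86_64_v4"
  unify: true
```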
@@ -46,7 +46,7 @@ modules:

   tcl:
     all:
-      autoload: none
+      autoload: direct

   # Default configurations if lmod is enabled
   lmod:
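This flips the default for Tcl module files from no autoloading to autoloading direct dependencies. A site that wants the previous behavior back can pin it in its own ``modules.yaml``; a sketch mirroring the layout shown in the diff:

```yaml
# Hypothetical site override restoring the pre-change behavior.
modules:
  tcl:
    all:
      autoload: none  # valid values: none, direct, all
```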
@@ -28,7 +28,7 @@ packages:
     gl: [glx, osmesa]
     glu: [mesa-glu, openglu]
     golang: [go, gcc]
-    go-external-or-gccgo-bootstrap: [go-bootstrap, gcc]
+    go-or-gccgo-bootstrap: [go-bootstrap, gcc]
     iconv: [libiconv]
     ipp: [intel-ipp]
     java: [openjdk, jdk, ibm-java]
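These lists are default provider preferences for virtual packages: the first entry is tried first. A site can reorder them in its own ``packages.yaml``; a sketch, with the reordering chosen purely for illustration:

```yaml
# Hypothetical override preferring gcc's gccgo for the golang virtual.
packages:
  all:
    providers:
      golang: [gcc, go]
```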
@@ -942,7 +942,7 @@ first ``libelf`` above, you would run:

    $ spack load /qmm4kso

-To see which packages that you have loaded to your enviornment you would
+To see which packages that you have loaded to your environment you would
 use ``spack find --loaded``.

 .. code-block:: console
@@ -18,7 +18,7 @@ your Spack mirror and then downloaded and installed by others.

 Whenever a mirror provides prebuilt packages, Spack will take these packages
 into account during concretization and installation, making ``spack install``
-signficantly faster.
+significantly faster.


 .. note::
@@ -28,11 +28,14 @@ This package provides the following variants:

 * **cuda_arch**

-  This variant supports the optional specification of the architecture.
+  This variant supports the optional specification of one or multiple architectures.
   Valid values are maintained in the ``cuda_arch_values`` property and
   are the numeric character equivalent of the compute capability version
   (e.g., '10' for version 1.0). Each provided value affects associated
   ``CUDA`` dependencies and compiler conflicts.

+  The variant builds both PTX code for the _virtual_ architecture
+  (e.g. ``compute_10``) and binary code for the _real_ architecture (e.g. ``sm_10``).
+
   GPUs and their compute capability versions are listed at
   https://developer.nvidia.com/cuda-gpus .
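Since ``cuda_arch`` now accepts multiple values, a site can request a specific compute capability for every CUDA-enabled build through a package preference. A sketch, with the capability value ``70`` chosen only as an example:

```yaml
# Hypothetical preference: build all CUDA packages for compute capability 7.0.
packages:
  all:
    variants: +cuda cuda_arch=70
```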
@@ -124,7 +124,7 @@ Using oneAPI Tools Installed by Spack
 =====================================

 Spack can be a convenient way to install and configure compilers and
-libaries, even if you do not intend to build a Spack package. If you
+libraries, even if you do not intend to build a Spack package. If you
 want to build a Makefile project using Spack-installed oneAPI compilers,
 then use spack to configure your environment::

@@ -397,7 +397,7 @@ for specifics and examples for ``packages.yaml`` files.

 .. If your system administrator did not provide modules for pre-installed Intel
    tools, you could do well to ask for them, because installing multiple copies
-   of the Intel tools, as is wont to happen once Spack is in the picture, is
+   of the Intel tools, as is won't to happen once Spack is in the picture, is
    bound to stretch disk space and patience thin. If you *are* the system
    administrator and are still new to modules, then perhaps it's best to follow
    the `next section <Installing Intel tools within Spack_>`_ and install the tools
@@ -653,7 +653,7 @@ follow `the next section <intel-install-libs_>`_ instead.
 * If you specified a custom variant (for example ``+vtune``) you may want to add this as your
   preferred variant in the packages configuration for the ``intel-parallel-studio`` package
   as described in :ref:`package-preferences`. Otherwise you will have to specify
-  the variant everytime ``intel-parallel-studio`` is being used as ``mkl``, ``fftw`` or ``mpi``
+  the variant every time ``intel-parallel-studio`` is being used as ``mkl``, ``fftw`` or ``mpi``
   implementation to avoid pulling in a different variant.

 * To set the Intel compilers for default use in Spack, instead of the usual ``%gcc``,
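The recommendation above amounts to a one-line package preference. A minimal sketch of what that could look like, using the ``+vtune`` variant the text gives as its example:

```yaml
# Hypothetical packages.yaml entry making +vtune the preferred variant.
packages:
  intel-parallel-studio:
    variants: +vtune
```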
@@ -582,7 +582,7 @@ libraries. Make sure not to add modules/packages containing the word
 "test", as these likely won't end up in the installation directory,
 or may require test dependencies like pytest to be installed.

-Instead of defining the ``import_modules`` explicity, only the subset
+Instead of defining the ``import_modules`` explicitly, only the subset
 of module names to be skipped can be defined by using ``skip_modules``.
 If a defined module has submodules, they are skipped as well, e.g.,
 in case the ``plotting`` modules should be excluded from the
@@ -227,6 +227,9 @@ You can get the name to use for ``<platform>`` by running ``spack arch
 --platform``. The system config scope has a ``<platform>`` section for
 sites at which ``/etc`` is mounted on multiple heterogeneous machines.


+.. _config-scope-precedence:
+
 ----------------
 Scope Precedence
 ----------------
@@ -239,6 +242,11 @@ lower-precedence settings. Completely ignoring higher-level configuration
 options is supported with the ``::`` notation for keys (see
 :ref:`config-overrides` below).

+There are also special notations for string concatenation and precendense override.
+Using the ``+:`` notation can be used to force *prepending* strings or lists. For lists, this is identical
+to the default behavior. Using the ``-:`` works similarly, but for *appending* values.
+:ref:`config-prepend-append`
+
 ^^^^^^^^^^^
 Simple keys
 ^^^^^^^^^^^
@@ -279,6 +287,47 @@ command:
     - ~/.spack/stage


+.. _config-prepend-append:
+
+^^^^^^^^^^^^^^^^^^^^
+String Concatenation
+^^^^^^^^^^^^^^^^^^^^
+
+Above, the user ``config.yaml`` *completely* overrides specific settings in the
+default ``config.yaml``. Sometimes, it is useful to add a suffix/prefix
+to a path or name. To do this, you can use the ``-:`` notation for *append*
+string concatenation at the end of a key in a configuration file. For example:
+
+.. code-block:: yaml
+   :emphasize-lines: 1
+   :caption: ~/.spack/config.yaml
+
+   config:
+     install_tree-: /my/custom/suffix/
+
+Spack will then append to the lower-precedence configuration under the
+``install_tree-:`` section:
+
+.. code-block:: console
+
+   $ spack config get config
+   config:
+     install_tree: /some/other/directory/my/custom/suffix
+     build_stage:
+       - $tempdir/$user/spack-stage
+       - ~/.spack/stage
+
+
+Similarly, ``+:`` can be used to *prepend* to a path or name:
+
+.. code-block:: yaml
+   :emphasize-lines: 1
+   :caption: ~/.spack/config.yaml
+
+   config:
+     install_tree+: /my/custom/suffix/
+
+
 .. _config-overrides:

 ^^^^^^^^^^^^^^^^^^^^^^^^^^
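The added documentation shows the merged result for the append (``-:``) case but not for prepend. By symmetry, the prepend form would be expected to merge like the following sketch; the lower-precedence value and the exact joining of the two strings are assumptions, not shown in the diff:

```yaml
# Hypothetical merge. Assume a lower-precedence scope sets:
#   config:
#     install_tree: /some/other/directory
# and a higher-precedence scope sets:
config:
  install_tree+: /my/custom/prefix/
# Expected merged value: the override string concatenated in front,
# i.e. roughly "/my/custom/prefix/" + "/some/other/directory".
```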
@@ -472,7 +472,7 @@ use my new hook as follows:

 .. code-block:: python

    def post_log_write(message, level):
-       """Do something custom with the messsage and level every time we write
+       """Do something custom with the message and level every time we write
        to the log
        """
        print('running post_log_write!')
@@ -1597,8 +1597,8 @@ in a Windows CMD prompt.

 .. note::
    If you chose to install Spack into a directory on Windows that is set up to require Administrative
-   Privleges, Spack will require elevated privleges to run.
-   Administrative Privleges can be denoted either by default such as
+   Privileges, Spack will require elevated privileges to run.
+   Administrative Privileges can be denoted either by default such as
    ``C:\Program Files``, or aministrator applied administrative restrictions
    on a directory that spack installs files to such as ``C:\Users``

@@ -1694,7 +1694,7 @@ Spack console via:

    spack install cpuinfo

-If in the previous step, you did not have CMake or Ninja installed, running the command above should boostrap both packages
+If in the previous step, you did not have CMake or Ninja installed, running the command above should bootstrap both packages

 """""""""""""""""""""""""""
 Windows Compatible Packages
@@ -13,7 +13,7 @@ The use of module systems to manage user environment in a controlled way
 is a common practice at HPC centers that is often embraced also by
 individual programmers on their development machines. To support this
 common practice Spack integrates with `Environment Modules
-<http://modules.sourceforge.net/>`_ and `LMod
+<http://modules.sourceforge.net/>`_ and `Lmod
 <http://lmod.readthedocs.io/en/latest/>`_ by providing post-install hooks
 that generate module files and commands to manipulate them.

@@ -26,8 +26,8 @@ Using module files via Spack
 ----------------------------

 If you have installed a supported module system you should be able to
-run either ``module avail`` or ``use -l spack`` to see what module
-files have been installed. Here is sample output of those programs,
+run ``module avail`` to see what module
+files have been installed. Here is sample output of those programs,
 showing lots of installed packages:

 .. code-block:: console
@@ -51,12 +51,7 @@ showing lots of installed packages:
    help2man-1.47.4-gcc-4.8-kcnqmau lua-luaposix-33.4.0-gcc-4.8-mdod2ry netlib-scalapack-2.0.2-gcc-6.3.0-rgqfr6d py-scipy-0.19.0-gcc-6.3.0-kr7nat4 zlib-1.2.11-gcc-6.3.0-7cqp6cj

 The names should look familiar, as they resemble the output from ``spack find``.
-You *can* use the modules here directly. For example, you could type either of these commands
-to load the ``cmake`` module:
-
-.. code-block:: console
-
-   $ use cmake-3.7.2-gcc-6.3.0-fowuuby
+For example, you could type the following command to load the ``cmake`` module:

 .. code-block:: console

@@ -93,9 +88,9 @@ the different file formats that can be generated by Spack:
 +-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+
 |                             | **Hook name**      | **Default root directory**    | **Default template file**                    | **Compatible tools** |
 +=============================+====================+===============================+==============================================+======================+
-| **TCL - Non-Hierarchical**  | ``tcl``            | share/spack/modules           | share/spack/templates/modules/modulefile.tcl | Env. Modules/LMod    |
+| **Tcl - Non-Hierarchical**  | ``tcl``            | share/spack/modules           | share/spack/templates/modules/modulefile.tcl | Env. Modules/Lmod    |
 +-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+
-| **Lua - Hierarchical**      | ``lmod``           | share/spack/lmod              | share/spack/templates/modules/modulefile.lua | LMod                 |
+| **Lua - Hierarchical**      | ``lmod``           | share/spack/lmod              | share/spack/templates/modules/modulefile.lua | Lmod                 |
 +-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+


@@ -396,13 +391,13 @@ name and version for all packages that depend on mpi.

 When specifying module names by projection for Lmod modules, we
 recommend NOT including names of dependencies (e.g., MPI, compilers)
-that are already in the LMod hierarchy.
+that are already in the Lmod hierarchy.



 .. note::
-   TCL modules
-   TCL modules also allow for explicit conflicts between modulefiles.
+   Tcl modules
+   Tcl modules also allow for explicit conflicts between modulefiles.

    .. code-block:: yaml

@@ -426,9 +421,9 @@ that are already in the LMod hierarchy.


 .. note::
-   LMod hierarchical module files
+   Lmod hierarchical module files
    When ``lmod`` is activated Spack will generate a set of hierarchical lua module
-   files that are understood by LMod. The hierarchy will always contain the
+   files that are understood by Lmod. The hierarchy will always contain the
    two layers ``Core`` / ``Compiler`` but can be further extended to
    any of the virtual dependencies present in Spack. A case that could be useful in
    practice is for instance:
@@ -450,7 +445,7 @@ that are already in the LMod hierarchy.

 that will generate a hierarchy in which the ``lapack`` and ``mpi`` layer can be switched
 independently. This allows a site to build the same libraries or applications against different
-implementations of ``mpi`` and ``lapack``, and let LMod switch safely from one to the
+implementations of ``mpi`` and ``lapack``, and let Lmod switch safely from one to the
 other.

 All packages built with a compiler in ``core_compilers`` and all
@@ -460,12 +455,12 @@ that are already in the LMod hierarchy.
 .. warning::
    Consistency of Core packages
    The user is responsible for maintining consistency among core packages, as ``core_specs``
-   bypasses the hierarchy that allows LMod to safely switch between coherent software stacks.
+   bypasses the hierarchy that allows Lmod to safely switch between coherent software stacks.

 .. warning::
    Deep hierarchies and ``lmod spider``
    For hierarchies that are deeper than three layers ``lmod spider`` may have some issues.
-   See `this discussion on the LMod project <https://github.com/TACC/Lmod/issues/114>`_.
+   See `this discussion on the Lmod project <https://github.com/TACC/Lmod/issues/114>`_.

 """"""""""""""""""""""
 Select default modules
@@ -534,7 +529,7 @@ installed to ``/spack/prefix/foo``, if ``foo`` installs executables to
 update ``MANPATH``.

 The default list of environment variables in this config section
-inludes ``PATH``, ``MANPATH``, ``ACLOCAL_PATH``, ``PKG_CONFIG_PATH``
+includes ``PATH``, ``MANPATH``, ``ACLOCAL_PATH``, ``PKG_CONFIG_PATH``
 and ``CMAKE_PREFIX_PATH``, as well as ``DYLD_FALLBACK_LIBRARY_PATH``
 on macOS. On Linux however, the corresponding ``LD_LIBRARY_PATH``
 variable is *not* set, because it affects the behavior of
@@ -634,8 +629,9 @@ by its dependency; when the dependency is autoloaded, the executable will be in
 PATH. Similarly for scripting languages such as Python, packages and their dependencies
 have to be loaded together.

-Autoloading is enabled by default for LMod, as it has great builtin support for through
-the ``depends_on`` function. For Environment Modules it is disabled by default.
+Autoloading is enabled by default for Lmod and Environment Modules. The former
+has builtin support for through the ``depends_on`` function. The latter uses
+``module load`` statement to load and track dependencies.

 Autoloading can also be enabled conditionally:

@@ -655,12 +651,14 @@ The allowed values for the ``autoload`` statement are either ``none``,
 ``direct`` or ``all``.

 .. note::
-   TCL prerequisites
+   Tcl prerequisites
    In the ``tcl`` section of the configuration file it is possible to use
    the ``prerequisites`` directive that accepts the same values as
    ``autoload``. It will produce module files that have a ``prereq``
-   statement, which can be used to autoload dependencies in some versions
-   of Environment Modules.
+   statement, which autoloads dependencies on Environment Modules when its
+   ``auto_handling`` configuration option is enabled. If Environment Modules
+   is installed with Spack, ``auto_handling`` is enabled by default starting
+   version 4.2. Otherwise it is enabled by default since version 5.0.

 ------------------------
 Maintaining Module Files
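The documentation above says autoloading "can also be enabled conditionally", but the example block itself falls outside this diff. A plausible sketch using the per-spec constraint syntax from the same schema; the set name ``default`` and the ``^python`` constraint are assumptions for illustration:

```yaml
# Hypothetical modules.yaml: autoload dependencies only for packages
# that depend on python, leave everything else unchanged.
modules:
  default:
    tcl:
      all:
        autoload: none
      ^python:
        autoload: direct
```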
(One file's diff is suppressed because it is too large and is not shown here.)
@@ -9,27 +9,32 @@
 CI Pipelines
 ============

-Spack provides commands that support generating and running automated build
-pipelines designed for Gitlab CI. At the highest level it works like this:
-provide a spack environment describing the set of packages you care about,
-and include within that environment file a description of how those packages
-should be mapped to Gitlab runners. Spack can then generate a ``.gitlab-ci.yml``
-file containing job descriptions for all your packages that can be run by a
-properly configured Gitlab CI instance. When run, the generated pipeline will
-build and deploy binaries, and it can optionally report to a CDash instance
+Spack provides commands that support generating and running automated build pipelines in CI instances. At the highest
+level it works like this: provide a spack environment describing the set of packages you care about, and include a
+description of how those packages should be mapped to Gitlab runners. Spack can then generate a ``.gitlab-ci.yml``
+file containing job descriptions for all your packages that can be run by a properly configured CI instance. When
+run, the generated pipeline will build and deploy binaries, and it can optionally report to a CDash instance
 regarding the health of the builds as they evolve over time.

 ------------------------------
 Getting started with pipelines
 ------------------------------

-It is fairly straightforward to get started with automated build pipelines. At
-a minimum, you'll need to set up a Gitlab instance (more about Gitlab CI
-`here <https://about.gitlab.com/product/continuous-integration/>`_) and configure
-at least one `runner <https://docs.gitlab.com/runner/>`_. Then the basic steps
-for setting up a build pipeline are as follows:
+To get started with automated build pipelines a Gitlab instance with version ``>= 12.9``
+(more about Gitlab CI `here <https://about.gitlab.com/product/continuous-integration/>`_)
+with at least one `runner <https://docs.gitlab.com/runner/>`_ configured is required. This
+can be done quickly by setting up a local Gitlab instance.

-#. Create a repository on your gitlab instance
+It is possible to set up pipelines on gitlab.com, but the builds there are limited to
+60 minutes and generic hardware. It is possible to
+`hook up <https://about.gitlab.com/blog/2018/04/24/getting-started-gitlab-ci-gcp>`_
+Gitlab to Google Kubernetes Engine (`GKE <https://cloud.google.com/kubernetes-engine/>`_)
+or Amazon Elastic Kubernetes Service (`EKS <https://aws.amazon.com/eks>`_), though those
+topics are outside the scope of this document.
+
+After setting up a Gitlab instance for running CI, the basic steps for setting up a build pipeline are as follows:
+
+#. Create a repository in the Gitlab instance with CI and a runner enabled.
 #. Add a ``spack.yaml`` at the root containing your pipeline environment
 #. Add a ``.gitlab-ci.yml`` at the root containing two jobs (one to generate
    the pipeline dynamically, and one to run the generated jobs).
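The two-job ``.gitlab-ci.yml`` mentioned in the last step typically pairs a generation job with a trigger of the generated child pipeline. A minimal sketch under those assumptions; the stage and job names are illustrative:

```yaml
# Hypothetical top-level .gitlab-ci.yml: one job generates the pipeline,
# the other triggers the generated jobs as a child pipeline.
stages: [generate, build]

generate-pipeline:
  stage: generate
  script:
    - spack ci generate --output-file jobs.yaml
  artifacts:
    paths: [jobs.yaml]

build-jobs:
  stage: build
  trigger:
    include:
      - artifact: jobs.yaml
        job: generate-pipeline
    strategy: depend   # child pipeline status propagates to the parent
```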
@@ -40,13 +45,6 @@ See the :ref:`functional_example` section for a minimal working example. See al
 the :ref:`custom_Workflow` section for a link to an example of a custom workflow
 based on spack pipelines.

-While it is possible to set up pipelines on gitlab.com, as illustrated above, the
-builds there are limited to 60 minutes and generic hardware. It is also possible to
-`hook up <https://about.gitlab.com/blog/2018/04/24/getting-started-gitlab-ci-gcp>`_
-Gitlab to Google Kubernetes Engine (`GKE <https://cloud.google.com/kubernetes-engine/>`_)
-or Amazon Elastic Kubernetes Service (`EKS <https://aws.amazon.com/eks>`_), though those
-topics are outside the scope of this document.
-
 Spack's pipelines are now making use of the
 `trigger <https://docs.gitlab.com/ee/ci/yaml/#trigger>`_ syntax to run
 dynamically generated
@@ -132,29 +130,35 @@ And here's the spack environment built by the pipeline represented as a

    mirrors: { "mirror": "s3://spack-public/mirror" }

-   gitlab-ci:
-     before_script:
-       - git clone ${SPACK_REPO}
-       - pushd spack && git checkout ${SPACK_CHECKOUT_VERSION} && popd
-       - . "./spack/share/spack/setup-env.sh"
-     script:
-       - pushd ${SPACK_CONCRETE_ENV_DIR} && spack env activate --without-view . && popd
-       - spack -d ci rebuild
-     mappings:
-       - match: ["os=ubuntu18.04"]
-         runner-attributes:
-           image:
-             name: ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01
-             entrypoint: [""]
-           tags:
-             - docker
+   ci:
+     enable-artifacts-buildcache: True
+     rebuild-index: False
+     pipeline-gen:
+     - any-job:
+         before_script:
+           - git clone ${SPACK_REPO}
+           - pushd spack && git checkout ${SPACK_CHECKOUT_VERSION} && popd
+           - . "./spack/share/spack/setup-env.sh"
+     - build-job:
+         tags: [docker]
+         image:
+           name: ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01
+           entrypoint: [""]


 The elements of this file important to spack ci pipelines are described in more
 detail below, but there are a couple of things to note about the above working
 example:

 .. note::
    There is no ``script`` attribute specified for here. The reason for this is
    Spack CI will automatically generate reasonable default scripts. More
    detail on what is in these scripts can be found below.

    Also notice the ``before_script`` section. It is required when using any of the
    default scripts to source the ``setup-env.sh`` script in order to inform
    the default scripts where to find the ``spack`` executable.

 Normally ``enable-artifacts-buildcache`` is not recommended in production as it
 results in large binary artifacts getting transferred back and forth between
 gitlab and the runners. But in this example on gitlab.com where there is no
@@ -174,7 +178,7 @@ during subsequent pipeline runs.
 With the addition of reproducible builds (#22887) a previously working
 pipeline will require some changes:

-* In the build jobs (``runner-attributes``), the environment location changed.
+* In the build-jobs, the environment location changed.
   This will typically show as a ``KeyError`` in the failing job. Be sure to
   point to ``${SPACK_CONCRETE_ENV_DIR}``.

@@ -196,9 +200,9 @@ ci pipelines. These commands are covered in more detail in this section.

 .. _cmd-spack-ci:

-^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^
 ``spack ci``
-^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^

 Super-command for functionality related to generating pipelines and executing
 pipeline jobs.
@@ -227,7 +231,7 @@ Using ``--prune-dag`` or ``--no-prune-dag`` configures whether or not jobs are
 generated for specs that are already up to date on the mirror. If enabling
 DAG pruning using ``--prune-dag``, more information may be required in your
 ``spack.yaml`` file, see the :ref:`noop_jobs` section below regarding
-``service-job-attributes``.
+``noop-job``.

 The optional ``--check-index-only`` argument can be used to speed up pipeline
 generation by telling spack to consider only remote buildcache indices when
@@ -263,11 +267,11 @@ generated by jobs in the pipeline.

 .. _cmd-spack-ci-rebuild:

-^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^
 ``spack ci rebuild``
-^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^

-The purpose of ``spack ci rebuild`` is straightforward: take its assigned
+The purpose of ``spack ci rebuild`` is to take an assigned
 spec and ensure a binary of a successful build exists on the target mirror.
 If the binary does not already exist, it is built from source and pushed
 to the mirror. The associated stand-alone tests are optionally run against
@@ -280,7 +284,7 @@ directory. The script is run in a job to install the spec from source. The
 resulting binary package is pushed to the mirror. If ``cdash`` is configured
 for the environment, then the build results will be uploaded to the site.

-Environment variables and values in the ``gitlab-ci`` section of the
+Environment variables and values in the ``ci::pipeline-gen`` section of the
 ``spack.yaml`` environment file provide inputs to this process. The
 two main sources of environment variables are variables written into
 ``.gitlab-ci.yml`` by ``spack ci generate`` and the GitLab CI runtime.
@@ -298,21 +302,23 @@ A snippet from an example ``spack.yaml`` file illustrating use of this
 option *and* specification of a package with broken tests is given below.
 The inclusion of a spec for building ``gptune`` is not shown here. Note
 that ``--tests`` is passed to ``spack ci rebuild`` as part of the
-``gitlab-ci`` script.
+``build-job`` script.

 .. code-block:: yaml

-   gitlab-ci:
-     script:
-       - . "./share/spack/setup-env.sh"
-       - spack --version
-       - cd ${SPACK_CONCRETE_ENV_DIR}
-       - spack env activate --without-view .
-       - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
-       - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
-       - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
-       - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
-       - spack -d ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
+   ci:
+     pipeline-gen:
+     - build-job
+       script:
+         - . "./share/spack/setup-env.sh"
+         - spack --version
+         - cd ${SPACK_CONCRETE_ENV_DIR}
+         - spack env activate --without-view .
+         - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
+         - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
+         - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
+         - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
+         - spack -d ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)

    broken-tests-packages:
      - gptune
@@ -354,113 +360,31 @@ arguments you can pass to ``spack ci reproduce-build`` in order to reproduce
|
||||
a particular build locally.
|
||||
|
||||
------------------------------------
|
||||
A pipeline-enabled spack environment
|
||||
Job Types
|
||||
------------------------------------
|
||||
|
||||
Here's an example of a spack environment file that has been enhanced with
|
||||
sections describing a build pipeline:
|
||||
^^^^^^^^^^^^^^^
|
||||
Rebuild (build)
|
||||
^^^^^^^^^^^^^^^
|
||||
|
||||
.. code-block:: yaml
|
||||
Rebuild jobs, denoted as ``build-job``'s in the ``pipeline-gen`` list, are jobs
|
||||
associated with concrete specs that have been marked for rebuild. By default a simple
|
||||
script for doing rebuild is generated, but may be modified as needed.
|
||||
|
||||
spack:
|
||||
definitions:
|
||||
- pkgs:
|
||||
- readline@7.0
|
||||
- compilers:
|
||||
- '%gcc@5.5.0'
|
||||
- oses:
|
||||
- os=ubuntu18.04
|
||||
- os=centos7
|
||||
specs:
|
||||
- matrix:
|
||||
- [$pkgs]
|
||||
- [$compilers]
|
||||
- [$oses]
|
||||
mirrors:
|
||||
cloud_gitlab: https://mirror.spack.io
|
||||
gitlab-ci:
|
||||
mappings:
|
||||
- match:
|
||||
- os=ubuntu18.04
|
||||
runner-attributes:
|
||||
tags:
|
||||
- spack-kube
|
||||
image: spack/ubuntu-bionic
|
||||
- match:
|
||||
- os=centos7
|
||||
runner-attributes:
|
||||
tags:
|
||||
- spack-kube
|
||||
image: spack/centos7
|
||||
cdash:
|
||||
build-group: Release Testing
|
||||
url: https://cdash.spack.io
|
||||
project: Spack
|
||||
site: Spack AWS Gitlab Instance
|
||||
The default script does three main steps, change directories to the pipelines concrete
|
||||
environment, activate the concrete environment, and run the ``spack ci rebuild`` command:
|
||||
|
||||
Hopefully, the ``definitions``, ``specs``, ``mirrors``, etc. sections are already
|
||||
familiar, as they are part of spack :ref:`environments`. So let's take a more
|
||||
in-depth look some of the pipeline-related sections in that environment file
|
||||
that might not be as familiar.
|
||||
.. code-block:: bash
|
||||
|
||||
The ``gitlab-ci`` section is used to configure how the pipeline workload should be
|
||||
generated, mainly how the jobs for building specs should be assigned to the
|
||||
configured runners on your instance. Each entry within the list of ``mappings``
|
||||
corresponds to a known gitlab runner, where the ``match`` section is used
|
||||
in assigning a release spec to one of the runners, and the ``runner-attributes``
|
||||
section is used to configure the spec/job for that particular runner.
|
||||
|
||||
Both the top-level ``gitlab-ci`` section as well as each ``runner-attributes``
|
||||
section can also contain the following keys: ``image``, ``tags``, ``variables``,
|
||||
``before_script``, ``script``, and ``after_script``. If any of these keys are
|
||||
provided at the ``gitlab-ci`` level, they will be used as the defaults for any
|
||||
``runner-attributes``, unless they are overridden in those sections. Specifying
|
||||
any of these keys at the ``runner-attributes`` level generally overrides the
|
||||
keys specified at the higher level, with a couple exceptions. Any ``variables``
|
||||
specified at both levels result in those dictionaries getting merged in the
|
||||
resulting generated job, and any duplicate variable names get assigned the value
|
||||
provided in the specific ``runner-attributes``. If ``tags`` are specified both
|
||||
at the ``gitlab-ci`` level as well as the ``runner-attributes`` level, then the
|
||||
lists of tags are combined, and any duplicates are removed.
|
||||
|
||||
See the section below on using a custom spack for an example of how these keys
|
||||
could be used.
|
||||
|
||||
There are other pipeline options you can configure within the ``gitlab-ci`` section
|
||||
as well.
|
||||
|
||||
The ``bootstrap`` section allows you to specify lists of specs from
|
||||
your ``definitions`` that should be staged ahead of the environment's ``specs`` (this
|
||||
section is described in more detail below). The ``enable-artifacts-buildcache`` key
|
||||
takes a boolean and determines whether the pipeline uses artifacts to store and
|
||||
pass along the buildcaches from one stage to the next (the default if you don't
|
||||
provide this option is ``False``).

The optional ``broken-specs-url`` key tells Spack to check against a list of
specs that are known to be currently broken in ``develop``. If any such specs
are found, the ``spack ci generate`` command will fail with an error message
informing the user what broken specs were encountered. This allows the pipeline
to fail early and avoid wasting compute resources attempting to build packages
that will not succeed.

The optional ``cdash`` section provides information that will be used by the
``spack ci generate`` command (invoked by ``spack ci start``) for reporting
to CDash. All the jobs generated from this environment will belong to a
"build group" within CDash that can be tracked over time. As the release
progresses, this build group may have jobs added or removed. The url, project,
and site are used to specify the CDash instance to which build results should
be reported.

Take a look at the
`schema <https://github.com/spack/spack/blob/develop/lib/spack/spack/schema/gitlab_ci.py>`_
for the gitlab-ci section of the spack environment file, to see precisely what
syntax is allowed there.

.. _rebuild_index:

^^^^^^^^^^^^^^^^^^^^^^
Update Index (reindex)
^^^^^^^^^^^^^^^^^^^^^^

By default, while a pipeline job may rebuild a package, create a buildcache
entry, and push it to the mirror, it does not automatically re-generate the
mirror's buildcache index afterward. As a result, the index may
not correctly reflect the mirror's contents at the end of a pipeline.

To make sure the buildcache index is up to date at the end of your pipeline,
spack generates a job to update the buildcache index of the target mirror
at the end of each pipeline by default. You can disable this behavior by
adding ``rebuild-index: False`` inside the ``ci`` section of your
spack environment.

Reindex jobs do not allow modifying the ``script`` attribute, since it is automatically
generated using the target mirror listed in the ``mirrors::mirror`` configuration.
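
Disabling the generated index-update job is a one-line change; a minimal sketch:

.. code-block:: yaml

   ci:
     rebuild-index: False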

^^^^^^^^^^^^^^^^^
Signing (signing)
^^^^^^^^^^^^^^^^^

This job is run after all of the rebuild jobs are completed and is intended to be used
to sign the package binaries built by a protected CI run. Signing jobs are generated
only if a signing job ``script`` is specified and the spack CI job type is protected.
Note that if an ``any-job`` section contains a script, this will not implicitly create a
``signing`` job; a signing job may only exist if it is explicitly specified in the
configuration with a ``script`` attribute. Specifying a signing job without a script
does not create a signing job, and the job configuration attributes will be ignored.
Signing jobs are always assigned the runner tags ``aws``, ``protected``, and ``notary``.
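
A sketch of what enabling a signing job could look like; the image and script
below are placeholders for whatever your protected runners actually provide:

.. code-block:: yaml

   ci:
     pipeline-gen:
       - signing-job:
           image:
             name: some.image.registry/notary-image   # hypothetical image
             entrypoint: ['']
           script:
             - sign-binaries.sh   # hypothetical signing script from the image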

^^^^^^^^^^^^^^^^^
Cleanup (cleanup)
^^^^^^^^^^^^^^^^^

When using ``temporary-storage-url-prefix``, the cleanup job will destroy the mirror
created for the associated Gitlab pipeline. Cleanup jobs do not allow modifying the
script, but do expect that the spack command is in the path, and require a
``before_script`` to be specified that sources the ``setup-env.sh`` script.
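
A sketch of the pieces involved; the bucket URL is hypothetical, and the
``before_script`` must make the ``spack`` command available, as described above:

.. code-block:: yaml

   ci:
     temporary-storage-url-prefix: s3://my-ci-bucket/tmp   # hypothetical prefix
     pipeline-gen:
       - cleanup-job:
           before_script:
             - . ./spack/share/spack/setup-env.sh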

.. _noop_jobs:

^^^^^^^^^^^^
No Op (noop)
^^^^^^^^^^^^

If no specs in an environment need to be rebuilt during a given pipeline run
(meaning all are already up to date on the mirror), a single successful job
(a NO-OP) is still generated to avoid an empty pipeline (which GitLab
considers to be an error). The ``noop-job*`` sections
can be added to your ``spack.yaml`` where you can provide ``tags`` and
``image`` or ``variables`` for the generated NO-OP job. This section also
supports providing ``before_script``, ``script``, and ``after_script``, in
case you need more control over the generated job. Following is an example
of this section added to a ``spack.yaml``:

.. code-block:: yaml

   spack:
     ci:
       pipeline-gen:
         - noop-job:
             tags: ['custom', 'tag']
             image:
               name: 'some.image.registry/custom-image:latest'
               entrypoint: ['/bin/bash']
             script::
               - echo "Custom message in a custom script"

The example above illustrates how you can provide the attributes used to run
the NO-OP job in the case of an empty pipeline. The only field for the NO-OP
job that might be generated for you is ``script``, but that will only happen
if you do not provide one yourself. Notice in this example the ``script``
uses the ``::`` notation to prescribe override behavior. Without this, the
``echo`` command would have been prepended to the automatically generated script
rather than replacing it.

------------------------------------
ci.yaml
------------------------------------

Here's an example of a spack configuration file describing a build pipeline:

.. code-block:: yaml

   ci:
     target: gitlab

     rebuild_index: True

     broken-specs-url: https://broken.specs.url

     broken-tests-packages:
       - gptune

     pipeline-gen:
       - submapping:
           - match:
               - os=ubuntu18.04
             build-job:
               tags:
                 - spack-kube
               image: spack/ubuntu-bionic
           - match:
               - os=centos7
             build-job:
               tags:
                 - spack-kube
               image: spack/centos7

     cdash:
       build-group: Release Testing
       url: https://cdash.spack.io
       project: Spack
       site: Spack AWS Gitlab Instance

The ``ci`` config section is used to configure how the pipeline workload should be
generated, mainly how the jobs for building specs should be assigned to the
configured runners on your instance. The main section for configuring pipelines
is ``pipeline-gen``, which is a list of job attribute sections that are merged,
using the same rules as Spack configs (:ref:`config-scope-precedence`), from the bottom up.
Sections are applied in an order consistent with how Spack orders scope precedence
when merging lists. There are two main section types: ``<type>-job`` sections and
``submapping`` sections.

^^^^^^^^^^^^^^^^^^^^^^
Job Attribute Sections
^^^^^^^^^^^^^^^^^^^^^^

Each type of job may have attributes added or removed via sections in the ``pipeline-gen``
list. Job-type-specific attributes may be specified using the keys ``<type>-job`` to
add attributes to all jobs of type ``<type>``, or ``<type>-job-remove`` to remove attributes
from jobs of type ``<type>``. Each section may only contain one type of job attribute
specification; for example, ``build-job`` and ``noop-job`` may not coexist, but
``build-job`` and ``build-job-remove`` may.

.. note::
   The ``*-remove`` specifications are applied before the additive attribute specifications.
   For example, in the case where both ``build-job`` and ``build-job-remove`` are listed in
   the same ``pipeline-gen`` section, a value that appears in both will still exist in the
   merged build-job after applying the section, because the removal happens first.
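
A sketch illustrating the note above; because the removal is applied first, the
``toolchain`` tag (an illustrative name) survives in the merged ``build-job``:

.. code-block:: yaml

   ci:
     pipeline-gen:
       - build-job:
           tags: [toolchain]
         build-job-remove:
           tags: [toolchain]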

All of the attributes specified are forwarded to the generated CI jobs; however, special
treatment is applied to the attributes ``tags``, ``image``, ``variables``, ``script``,
``before_script``, and ``after_script``, as they are components recognized explicitly by the
Spack CI generator. For the ``tags`` attribute, Spack will remove reserved tags
(:ref:`reserved_tags`) from all jobs specified in the config. In some cases, such as for
``signing`` jobs, reserved tags will be added back based on the type of CI that is being run.

Once a runner has been chosen to build a release spec, the ``build-job*``
sections provide information determining details of the job in the context of
the runner. At least one of the ``build-job*`` sections must contain a ``tags`` key, which
is a list containing at least one tag used to select the runner from among the
runners known to the gitlab instance. For Docker executor type runners, the
``image`` key is used to specify the Docker image used to build the release spec.
For other types of runners, the ``variables`` key can be used to pass any
information on to the runner that it needs to do its work (e.g. scheduler
parameters, etc.). Any ``variables`` provided here will be added, verbatim, to
each job.

The ``build-job`` section also allows users to supply custom ``script``,
``before_script``, and ``after_script`` sections to be applied to every job
scheduled on that runner. This allows users to do any custom preparation or
cleanup tasks that fit their particular workflow, as well as completely
customize the rebuilding of a spec if they so choose. If you do not provide
a custom ``script``, spack will generate one for you that assumes the concrete
environment directory is located within your ``--artifacts_root`` (or if not
provided, within your ``$CI_PROJECT_DIR``), activates that environment for
you, and invokes ``spack ci rebuild``.

Sections that specify scripts (``script``, ``before_script``, ``after_script``) are all
read as lists of commands or lists of lists of commands. It is recommended to write scripts
as lists of lists if scripts will be composed via merging. The default behavior of merging
lists will remove duplicate commands and potentially apply unwanted reordering, whereas
merging lists of lists will preserve the local ordering and never remove duplicate
commands. When writing commands to the CI target script, all lists are expanded and
flattened into a single list.
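
For example, a sketch of a script written as a list of lists, so the inner
commands stay ordered and are never deduplicated when sections merge:

.. code-block:: yaml

   ci:
     pipeline-gen:
       - build-job:
           before_script:
             - - git clone ${SPACK_REPO}
               - . "./spack/share/spack/setup-env.sh"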

^^^^^^^^^^^^^^^^^^^
Submapping Sections
^^^^^^^^^^^^^^^^^^^

A special case of attribute specification is the ``submapping`` section, which may be used
to apply job attributes to build jobs based on the package spec associated with the rebuild
job. Submapping is specified as a list of spec ``match`` lists associated with
``build-job``/``build-job-remove`` sections. There are two options for ``match_behavior``:
either ``first`` or ``merge`` may be specified. In either case, the ``submapping`` list is
processed from the bottom up, and then each ``match`` list is searched for a string that
satisfies the check ``spec.satisfies({match_item})`` for each concrete spec.

In the case of ``match_behavior: first``, the first ``match`` section in the list of
``submappings`` that contains a string that satisfies the spec will apply its
``build-job*`` attributes to the rebuild job associated with that spec. This is the
default behavior, and is the method used if no ``match_behavior`` is specified.

In the case of ``merge`` match behavior, all of the ``match`` sections in the list of
``submappings`` that contain a string that satisfies the spec will have the associated
``build-job*`` attributes applied to the rebuild job associated with that spec. Again,
the attributes will be merged starting from the bottom match going up to the top match.

In the case that no match is found in a submapping section, no additional attributes will be applied.
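
A sketch of a submapping with an explicit ``match_behavior``; the tags and the
variable are illustrative, and this assumes ``match_behavior`` is a sibling key
of the ``submapping`` list in the same ``pipeline-gen`` entry:

.. code-block:: yaml

   ci:
     pipeline-gen:
       - match_behavior: merge
         submapping:
           - match:
               - os=centos7
             build-job:
               tags: [spack-kube]
           - match:
               - gptune
             build-job:
               variables:
                 NEEDS_BIG_MEM: 'true'   # hypothetical variable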

^^^^^^^^^^^^^
Bootstrapping
^^^^^^^^^^^^^

The ``bootstrap`` section allows you to specify lists of specs from
your ``definitions`` that should be staged ahead of the environment's ``specs``. At the moment
the only viable use-case for bootstrapping is to install compilers.

Here's an example of what bootstrapping some compilers might look like:
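
A sketch of the shape such a section could take; the list name and compiler
spec here are illustrative only:

.. code-block:: yaml

   spack:
     definitions:
       - compiler-pkgs:
           - gcc@12.2.0
     ci:
       bootstrap:
         - name: compiler-pkgs
           compiler-agnostic: true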

You can also leave the ``bootstrap`` section out of your
environment/stack file, and in that case no bootstrapping will be done (only the
specs will be staged for building) and the runners will be expected to already
have all needed compilers installed and configured for spack to use.

^^^^^^^^^^^^^^^^^^^
Pipeline Buildcache
^^^^^^^^^^^^^^^^^^^

The ``enable-artifacts-buildcache`` key
takes a boolean and determines whether the pipeline uses artifacts to store and
pass along the buildcaches from one stage to the next (the default if you don't
provide this option is ``False``).

^^^^^^^^^^^^^^^^
Broken Specs URL
^^^^^^^^^^^^^^^^

The optional ``broken-specs-url`` key tells Spack to check against a list of
specs that are known to be currently broken in ``develop``. If any such specs
are found, the ``spack ci generate`` command will fail with an error message
informing the user what broken specs were encountered. This allows the pipeline
to fail early and avoid wasting compute resources attempting to build packages
that will not succeed.
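
A minimal sketch, reusing the URL from the example above:

.. code-block:: yaml

   ci:
     broken-specs-url: https://broken.specs.url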

^^^^^
CDash
^^^^^

The optional ``cdash`` section provides information that will be used by the
``spack ci generate`` command (invoked by ``spack ci start``) for reporting
to CDash. All the jobs generated from this environment will belong to a
"build group" within CDash that can be tracked over time. As the release
progresses, this build group may have jobs added or removed. The url, project,
and site are used to specify the CDash instance to which build results should
be reported.

Take a look at the
`schema <https://github.com/spack/spack/blob/develop/lib/spack/spack/schema/ci.py>`_
for the ``ci`` section of the spack environment file, to see precisely what
syntax is allowed there.

.. _reserved_tags:

^^^^^^^^^^^^^
Reserved Tags
^^^^^^^^^^^^^

Spack has a subset of tags (``public``, ``protected``, and ``notary``) that it reserves
for classifying runners that may require special permissions or access. The tags
``public`` and ``protected`` are used to distinguish between runners that use public
permissions and runners with protected permissions. The ``notary`` tag is a special tag
that is used to indicate runners that have access to the highly protected information
used for signing binaries using the ``signing`` job.

.. _staging_algorithm:

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Summary of ``.gitlab-ci.yml`` generation algorithm
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

All specs yielded by the matrix (or all the specs in the environment) have their
dependencies computed, and the entire resulting set of specs are staged together
before being run through the ``ci/pipeline-gen`` entries, where each staged
spec is assigned a runner. "Staging" is the name given to the process of
figuring out in what order the specs should be built, taking into consideration
Gitlab CI rules about jobs/stages. In the staging process the goal is to maximize
the number of jobs in any stage of the pipeline, while ensuring that the jobs in
any stage only depend on jobs in previous stages (since those jobs are guaranteed
to have completed already). As a runner is determined for a job, the information
in the merged ``any-job*`` and ``build-job*`` sections is used to populate various parts of the job
description that will be used by the target CI pipelines. Once all the jobs have been assigned
a runner, the ``.gitlab-ci.yml`` is written to disk.

The short example provided above would result in the ``readline``, ``ncurses``,
and ``pkgconf`` packages getting staged and built on the runner chosen by the
``spack-k8s`` tag. In this example, spack assumes the runner is a Docker executor
type runner, and thus certain jobs will be run in the ``centos7`` container,
and others in the ``ubuntu-18.04`` container. The resulting ``.gitlab-ci.yml``
will contain 6 jobs in three stages. Once the jobs have been generated, the
presence of a ``SPACK_CDASH_AUTH_TOKEN`` environment variable during the
``spack ci generate`` command would result in all of the jobs being put in a
build group on CDash called "Release Testing" (that group will be created if
it didn't already exist).

-------------------------------------
Using a custom spack in your pipeline
-------------------------------------

Suppose you want your pipeline jobs to run with a custom fork or branch of spack
rather than a copy already available on the runners, including the rebuild jobs
generated by ``spack ci generate``. You also want your generated rebuild jobs to
clone and source that custom spack before running their workload, which you can
arrange with a configuration like the following:

.. code-block:: yaml

   spack:
     ...
     ci:
       pipeline-gen:
         - build-job:
             tags:
               - spack-kube
             image: spack/ubuntu-bionic
             before_script:
               - git clone ${SPACK_REPO}
               - pushd spack && git checkout ${SPACK_REF} && popd
               - . "./spack/share/spack/setup-env.sh"
             script:
               - spack env activate --without-view ${SPACK_CONCRETE_ENV_DIR}
               - spack -d ci rebuild
             after_script:
               - rm -rf ./spack

Now all of the generated rebuild jobs will use the same shell script to clone
spack before running their actual workload.

If a signing key is available to the pipeline, spack will use it to sign and
verify binary packages (when installing or creating buildcaches). You could
also have already trusted a key spack knows about, or if no key is present anywhere,
spack will install specs using ``--no-check-signature`` and create buildcaches
using ``-u`` (for unsigned binaries).

lib/spack/env/cc (vendored): 93 changes

@@ -427,6 +427,48 @@ isystem_include_dirs_list=""
 libs_list=""
 other_args_list=""

+# Global state for keeping track of -Wl,-rpath -Wl,/path
+wl_expect_rpath=no
+
+parse_Wl() {
+    # drop -Wl
+    shift
+    while [ $# -ne 0 ]; do
+        if [ "$wl_expect_rpath" = yes ]; then
+            rp="$1"
+            wl_expect_rpath=no
+        else
+            rp=""
+            case "$1" in
+                -rpath=*)
+                    rp="${1#-rpath=}"
+                    ;;
+                --rpath=*)
+                    rp="${1#--rpath=}"
+                    ;;
+                -rpath|--rpath)
+                    wl_expect_rpath=yes
+                    ;;
+                "$dtags_to_strip")
+                    ;;
+                *)
+                    append other_args_list "-Wl,$1"
+                    ;;
+            esac
+        fi
+        if [ -n "$rp" ]; then
+            if system_dir "$rp"; then
+                append system_rpath_dirs_list "$rp"
+            else
+                append rpath_dirs_list "$rp"
+            fi
+        fi
+        shift
+    done
+    # For lack of local variables, always reset this to the empty string.
+    rp=""
+}
+

 while [ $# -ne 0 ]; do

@@ -526,54 +568,9 @@ while [ $# -ne 0 ]; do
             append other_args_list "-l$arg"
             ;;
         -Wl,*)
-            arg="${1#-Wl,}"
-            if [ -z "$arg" ]; then shift; arg="$1"; fi
-            case "$arg" in
-                -rpath=*) rp="${arg#-rpath=}" ;;
-                --rpath=*) rp="${arg#--rpath=}" ;;
-                -rpath,*) rp="${arg#-rpath,}" ;;
-                --rpath,*) rp="${arg#--rpath,}" ;;
-                -rpath|--rpath)
-                    shift; arg="$1"
-                    case "$arg" in
-                        -Wl,*)
-                            rp="${arg#-Wl,}"
-                            ;;
-                        *)
-                            die "-Wl,-rpath was not followed by -Wl,*"
-                            ;;
-                    esac
-                    ;;
-                "$dtags_to_strip")
-                    : # We want to remove this flag explicitly
-                    ;;
-                *)
-                    append other_args_list "-Wl,$arg"
-                    ;;
-            esac
-            ;;
-        -Xlinker,*)
-            arg="${1#-Xlinker,}"
-            if [ -z "$arg" ]; then shift; arg="$1"; fi
-
-            case "$arg" in
-                -rpath=*) rp="${arg#-rpath=}" ;;
-                --rpath=*) rp="${arg#--rpath=}" ;;
-                -rpath|--rpath)
-                    shift; arg="$1"
-                    case "$arg" in
-                        -Xlinker,*)
-                            rp="${arg#-Xlinker,}"
-                            ;;
-                        *)
-                            die "-Xlinker,-rpath was not followed by -Xlinker,*"
-                            ;;
-                    esac
-                    ;;
-                *)
-                    append other_args_list "-Xlinker,$arg"
-                    ;;
-            esac
+            IFS=,
+            parse_Wl $1
+            unset IFS
             ;;
         -Xlinker)
             if [ "$2" = "-rpath" ]; then
lib/spack/llnl/util/filesystem.py

@@ -16,7 +16,6 @@
 import sys
 import tempfile
 from contextlib import contextmanager
-from sys import platform as _platform
 from typing import Callable, List, Match, Optional, Tuple, Union

 from llnl.util import tty
@@ -26,9 +25,7 @@
 from spack.util.executable import Executable, which
 from spack.util.path import path_to_os_path, system_path_filter

-is_windows = _platform == "win32"
-
-if not is_windows:
+if sys.platform != "win32":
     import grp
     import pwd
 else:
@@ -154,7 +151,7 @@ def lookup(name):


 def getuid():
-    if is_windows:
+    if sys.platform == "win32":
         import ctypes

         if ctypes.windll.shell32.IsUserAnAdmin() == 0:
@@ -167,7 +164,7 @@ def getuid():
 @system_path_filter
 def rename(src, dst):
     # On Windows, os.rename will fail if the destination file already exists
-    if is_windows:
+    if sys.platform == "win32":
         # Windows path existence checks will sometimes fail on junctions/links/symlinks
         # so check for that case
         if os.path.exists(dst) or os.path.islink(dst):
@@ -196,7 +193,7 @@ def _get_mime_type():
     """Generate method to call `file` system command to acquire mime type
    for a specified path
    """
-    if is_windows:
+    if sys.platform == "win32":
         # -h option (no-dereference) does not exist in Windows
         return file_command("-b", "--mime-type")
     else:
@@ -551,7 +548,7 @@ def get_owner_uid(path, err_msg=None):
     else:
         p_stat = os.stat(path)

-    if _platform != "win32":
+    if sys.platform != "win32":
         owner_uid = p_stat.st_uid
     else:
         sid = win32security.GetFileSecurity(
@@ -584,7 +581,7 @@ def group_ids(uid=None):
     Returns:
         (list of int): gids of groups the user is a member of
     """
-    if is_windows:
+    if sys.platform == "win32":
         tty.warn("Function is not supported on Windows")
         return []

@@ -604,7 +601,7 @@ def group_ids(uid=None):
 @system_path_filter(arg_slice=slice(1))
 def chgrp(path, group, follow_symlinks=True):
     """Implement the bash chgrp function on a single path"""
-    if is_windows:
+    if sys.platform == "win32":
         raise OSError("Function 'chgrp' is not supported on Windows")

     if isinstance(group, str):
@@ -1131,7 +1128,7 @@ def open_if_filename(str_or_file, mode="r"):
 @system_path_filter
 def touch(path):
     """Creates an empty file at the specified path."""
-    if is_windows:
+    if sys.platform == "win32":
         perms = os.O_WRONLY | os.O_CREAT
     else:
         perms = os.O_WRONLY | os.O_CREAT | os.O_NONBLOCK | os.O_NOCTTY
@@ -1193,7 +1190,7 @@ def temp_cwd():
         yield tmp_dir
     finally:
         kwargs = {}
-        if is_windows:
+        if sys.platform == "win32":
             kwargs["ignore_errors"] = False
             kwargs["onerror"] = readonly_file_handler(ignore_errors=True)
         shutil.rmtree(tmp_dir, **kwargs)
@@ -1438,7 +1435,7 @@ def visit_directory_tree(root, visitor, rel_path="", depth=0):
         try:
             isdir = f.is_dir()
         except OSError as e:
-            if is_windows and hasattr(e, "winerror") and e.winerror == 5 and islink:
+            if sys.platform == "win32" and hasattr(e, "winerror") and e.winerror == 5 and islink:
                 # if path is a symlink, determine destination and
                 # evaluate file vs directory
                 link_target = resolve_link_target_relative_to_the_link(f)
@@ -1547,11 +1544,11 @@ def readonly_file_handler(ignore_errors=False):
     """

     def error_remove_readonly(func, path, exc):
-        if not is_windows:
+        if sys.platform != "win32":
             raise RuntimeError("This method should only be invoked on Windows")
         excvalue = exc[1]
         if (
-            is_windows
+            sys.platform == "win32"
             and func in (os.rmdir, os.remove, os.unlink)
             and excvalue.errno == errno.EACCES
         ):
@@ -1581,7 +1578,7 @@ def remove_linked_tree(path):

     # Windows readonly files cannot be removed by Python
     # directly.
-    if is_windows:
+    if sys.platform == "win32":
         kwargs["ignore_errors"] = False
         kwargs["onerror"] = readonly_file_handler(ignore_errors=True)

@@ -2095,7 +2092,7 @@ def names(self):
         # on non Windows platform
         # Windows valid library extensions are:
         # ['.dll', '.lib']
-        valid_exts = [".dll", ".lib"] if is_windows else [".dylib", ".so", ".a"]
+        valid_exts = [".dll", ".lib"] if sys.platform == "win32" else [".dylib", ".so", ".a"]
         for ext in valid_exts:
             i = name.rfind(ext)
             if i != -1:
@@ -2243,7 +2240,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
         message = message.format(find_libraries.__name__, type(libraries))
         raise TypeError(message)

-    if is_windows:
+    if sys.platform == "win32":
         static_ext = "lib"
         # For linking (runtime=False) you need the .lib files regardless of
         # whether you are doing a shared or static link
@@ -2275,7 +2272,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
     # finally search all of root recursively. The search stops when the first
     # match is found.
     common_lib_dirs = ["lib", "lib64"]
-    if is_windows:
+    if sys.platform == "win32":
         common_lib_dirs.extend(["bin", "Lib"])

     for subdir in common_lib_dirs:
@@ -2410,7 +2407,7 @@ def _link(self, path, dest_dir):
         # For py2 compatibility, we have to catch the specific Windows error code
         # associated with trying to create a file that already exists (winerror 183)
         except OSError as e:
-            if e.winerror == 183:
+            if sys.platform == "win32" and (e.winerror == 183 or e.errno == errno.EEXIST):
                 # We have either already symlinked or we are encountering a naming clash;
                 # either way, we don't want to overwrite existing libraries
                 already_linked = islink(dest_file)

lib/spack/llnl/util/symlink.py

@@ -5,15 +5,13 @@
 import errno
 import os
 import shutil
 import sys
 import tempfile
 from os.path import exists, join
-from sys import platform as _platform

 from llnl.util import lang

-is_windows = _platform == "win32"
-
-if is_windows:
+if sys.platform == "win32":
     from win32file import CreateHardLink


@@ -23,7 +21,7 @@ def symlink(real_path, link_path):

     On Windows, use junctions if os.symlink fails.
     """
-    if not is_windows:
+    if sys.platform != "win32":
         os.symlink(real_path, link_path)
     elif _win32_can_symlink():
         # Windows requires target_is_directory=True when the target is a dir.
@@ -32,9 +30,15 @@ def symlink(real_path, link_path):
         try:
             # Try to use junctions
             _win32_junction(real_path, link_path)
-        except OSError:
-            # If all else fails, fall back to copying files
-            shutil.copyfile(real_path, link_path)
+        except OSError as e:
+            if e.errno == errno.EEXIST:
+                # EEXIST error indicates that the file we're trying to "link"
+                # is already present; don't bother trying to copy, which will
+                # also fail: just raise
+                raise
+            else:
+                # If all else fails, fall back to copying files
+                shutil.copyfile(real_path, link_path)


 def islink(path):
@@ -99,7 +103,7 @@ def _win32_is_junction(path):
     if os.path.islink(path):
         return False

-    if is_windows:
+    if sys.platform == "win32":
         import ctypes.wintypes

         GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW

lib/spack/spack/abi.py

@@ -25,7 +25,7 @@ def architecture_compatible(self, target, constraint):
         return (
             not target.architecture
             or not constraint.architecture
-            or target.architecture.satisfies(constraint.architecture)
+            or target.architecture.intersects(constraint.architecture)
         )

     @memoized
@@ -104,7 +104,7 @@ def compiler_compatible(self, parent, child, **kwargs):
         for cversion in child.compiler.versions:
             # For a few compilers use specialized comparisons.
             # Otherwise match on version match.
-            if pversion.satisfies(cversion):
+            if pversion.intersects(cversion):
                 return True
             elif parent.compiler.name == "gcc" and self._gcc_compiler_compare(
                 pversion, cversion

lib/spack/spack/audit.py

@@ -695,8 +695,11 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
         try:
             variant.validate_or_raise(vspec, pkg_cls=pkg_cls)
         except spack.variant.InvalidVariantValueError:
-            error_msg = "The variant '{}' default value in package '{}' cannot be validated"
-            errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
+            error_msg = (
+                "The default value of the variant '{}' in package '{}' failed validation"
+            )
+            question = "Is it among the allowed values?"
+            errors.append(error_cls(error_msg.format(variant_name, pkg_name), [question]))

     return errors

@@ -721,7 +724,7 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls):
         dependency_pkg_cls = None
         try:
             dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
-            assert any(v.satisfies(s.versions) for v in list(dependency_pkg_cls.versions))
+            assert any(v.intersects(s.versions) for v in list(dependency_pkg_cls.versions))
         except Exception:
             summary = (
                 "{0}: dependency on {1} cannot be satisfied " "by known versions of {1.name}"

lib/spack/spack/binary_distribution.py

@@ -6,6 +6,8 @@
 import codecs
 import collections
 import hashlib
+import io
+import itertools
 import json
 import multiprocessing.pool
 import os
@@ -20,7 +22,8 @@
 import urllib.parse
 import urllib.request
 import warnings
-from contextlib import closing
+from contextlib import closing, contextmanager
+from gzip import GzipFile
 from urllib.error import HTTPError, URLError

 import ruamel.yaml as yaml
@@ -39,6 +42,7 @@
 import spack.platforms
 import spack.relocate as relocate
 import spack.repo
+import spack.stage
 import spack.store
 import spack.traverse as traverse
 import spack.util.crypto
@@ -739,34 +743,31 @@ def get_buildfile_manifest(spec):
     return data


-def write_buildinfo_file(spec, workdir, rel=False):
-    """
-    Create a cache file containing information
-    required for the relocation
-    """
+def prefixes_to_hashes(spec):
+    return {
+        str(s.prefix): s.dag_hash()
+        for s in itertools.chain(
+            spec.traverse(root=True, deptype="link"), spec.dependencies(deptype="run")
+        )
+    }
+
+
+def get_buildinfo_dict(spec, rel=False):
+    """Create metadata for a tarball"""
     manifest = get_buildfile_manifest(spec)

-    prefix_to_hash = dict()
-    prefix_to_hash[str(spec.package.prefix)] = spec.dag_hash()
-    deps = spack.build_environment.get_rpath_deps(spec.package)
-    for d in deps + spec.dependencies(deptype="run"):
-        prefix_to_hash[str(d.prefix)] = d.dag_hash()
-
-    # Create buildinfo data and write it to disk
-    buildinfo = {}
-    buildinfo["sbang_install_path"] = spack.hooks.sbang.sbang_install_path()
-    buildinfo["relative_rpaths"] = rel
-    buildinfo["buildpath"] = spack.store.layout.root
-    buildinfo["spackprefix"] = spack.paths.prefix
-    buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.layout.root)
-    buildinfo["relocate_textfiles"] = manifest["text_to_relocate"]
-    buildinfo["relocate_binaries"] = manifest["binary_to_relocate"]
-    buildinfo["relocate_links"] = manifest["link_to_relocate"]
-    buildinfo["hardlinks_deduped"] = manifest["hardlinks_deduped"]
-    buildinfo["prefix_to_hash"] = prefix_to_hash
-    filename = buildinfo_file_name(workdir)
-    with open(filename, "w") as outfile:
-        outfile.write(syaml.dump(buildinfo, default_flow_style=True))
+    return {
+        "sbang_install_path": spack.hooks.sbang.sbang_install_path(),
+        "relative_rpaths": rel,
+        "buildpath": spack.store.layout.root,
+        "spackprefix": spack.paths.prefix,
+        "relative_prefix": os.path.relpath(spec.prefix, spack.store.layout.root),
+        "relocate_textfiles": manifest["text_to_relocate"],
+        "relocate_binaries": manifest["binary_to_relocate"],
+        "relocate_links": manifest["link_to_relocate"],
+        "hardlinks_deduped": manifest["hardlinks_deduped"],
+        "prefix_to_hash": prefixes_to_hashes(spec),
+    }


 def tarball_directory_name(spec):
@@ -1139,6 +1140,68 @@ def generate_key_index(key_prefix, tmpdir=None):
         shutil.rmtree(tmpdir)


+@contextmanager
+def gzip_compressed_tarfile(path):
+    """Create a reproducible, compressed tarfile"""
+    # Create gzip compressed tarball of the install prefix
+    # 1) Use explicit empty filename and mtime 0 for gzip header reproducibility.
+    #    If the filename="" is dropped, Python will use fileobj.name instead.
+    #    This should effectively mimic `gzip --no-name`.
+    # 2) On AMD Ryzen 3700X and an SSD disk, we have the following on compression speed:
+    # compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB
+    # compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
+    # So we follow gzip.
+    with open(path, "wb") as fileobj, closing(
+        GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=fileobj)
+    ) as gzip_file, tarfile.TarFile(name="", mode="w", fileobj=gzip_file) as tar:
+        yield tar
+
+
+def deterministic_tarinfo(tarinfo: tarfile.TarInfo):
+    # We only add files, symlinks, hardlinks, and directories.
+    # No character devices, block devices and FIFOs should ever enter a tarball.
+    if tarinfo.isdev():
+        return None
+
+    # For distribution, it makes no sense to keep user/group data, since (a) they don't exist
+    # on other machines, and (b) they lead to surprises as `tar x` run as root will change
+    # ownership if it can. We want to extract as the current user. By setting owner to root,
+    # root will extract as root, and a non-privileged user will extract as themselves.
+    tarinfo.uid = 0
+    tarinfo.gid = 0
+    tarinfo.uname = ""
+    tarinfo.gname = ""
+
+    # Reset mtime to epoch time; our prefixes are not truly immutable, so files may get
+    # touched; as long as the content does not change, this ensures we get stable tarballs.
+    tarinfo.mtime = 0
+
+    # Normalize mode
+    if tarinfo.isfile() or tarinfo.islnk():
+        # If user can execute, use 0o755; else 0o644
+        # This is to avoid potentially unsafe world writable & executable files that may get
+        # extracted when Python or tar is run with privileges
+        tarinfo.mode = 0o644 if tarinfo.mode & 0o100 == 0 else 0o755
+    else:  # symbolic link and directories
+        tarinfo.mode = 0o755
+
+    return tarinfo
+
+
+def tar_add_metadata(tar: tarfile.TarFile, path: str, data: dict):
+    # Serialize buildinfo for the tarball
+    bstring = syaml.dump(data, default_flow_style=True).encode("utf-8")
+    tarinfo = tarfile.TarInfo(name=path)
+    tarinfo.size = len(bstring)
+    tar.addfile(deterministic_tarinfo(tarinfo), io.BytesIO(bstring))
+
+
+def _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo):
+    with gzip_compressed_tarfile(tarfile_path) as tar:
+        tar.add(name=binaries_dir, arcname=pkg_dir, filter=deterministic_tarinfo)
+        tar_add_metadata(tar, buildinfo_file_name(pkg_dir), buildinfo)
+
+
 def _build_tarball(
     spec,
     out_url,
@@ -1156,15 +1219,37 @@ def _build_tarball(
     if not spec.concrete:
         raise ValueError("spec must be concrete to build tarball")

-    # set up some paths
-    tmpdir = tempfile.mkdtemp()
-    cache_prefix = build_cache_prefix(tmpdir)
+    with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
+        _build_tarball_in_stage_dir(
+            spec,
+            out_url,
+            stage_dir=tmpdir,
+            force=force,
+            relative=relative,
+            unsigned=unsigned,
+            allow_root=allow_root,
+            key=key,
+            regenerate_index=regenerate_index,
+        )
+
+
+def _build_tarball_in_stage_dir(
+    spec,
+    out_url,
+    stage_dir,
+    force=False,
+    relative=False,
+    unsigned=False,
+    allow_root=False,
+    key=None,
+    regenerate_index=False,
+):
+    cache_prefix = build_cache_prefix(stage_dir)
     tarfile_name = tarball_name(spec, ".spack")
     tarfile_dir = os.path.join(cache_prefix, tarball_directory_name(spec))
     tarfile_path = os.path.join(tarfile_dir, tarfile_name)
     spackfile_path = os.path.join(cache_prefix, tarball_path_name(spec, ".spack"))
-    remote_spackfile_path = url_util.join(out_url, os.path.relpath(spackfile_path, tmpdir))
+    remote_spackfile_path = url_util.join(out_url, os.path.relpath(spackfile_path, stage_dir))

     mkdirp(tarfile_dir)
     if web_util.url_exists(remote_spackfile_path):
@@ -1183,7 +1268,7 @@ def _build_tarball(
     signed_specfile_path = "{0}.sig".format(specfile_path)

     remote_specfile_path = url_util.join(
-        out_url, os.path.relpath(specfile_path, os.path.realpath(tmpdir))
+        out_url, os.path.relpath(specfile_path, os.path.realpath(stage_dir))
     )
     remote_signed_specfile_path = "{0}.sig".format(remote_specfile_path)

@@ -1199,7 +1284,7 @@ def _build_tarball(
         raise NoOverwriteException(url_util.format(remote_specfile_path))

     pkg_dir = os.path.basename(spec.prefix.rstrip(os.path.sep))
-    workdir = os.path.join(tmpdir, pkg_dir)
+    workdir = os.path.join(stage_dir, pkg_dir)

     # TODO: We generally don't want to mutate any files, but when using relative
     # mode, Spack unfortunately *does* mutate rpaths and links ahead of time.
@@ -1217,39 +1302,22 @@ def _build_tarball(
         os.remove(temp_tarfile_path)
     else:
         binaries_dir = spec.prefix
-        mkdirp(os.path.join(workdir, ".spack"))

     # create info for later relocation and create tar
-    write_buildinfo_file(spec, workdir, relative)
+    buildinfo = get_buildinfo_dict(spec, relative)

     # optionally make the paths in the binaries relative to each other
     # in the spack install tree before creating tarball
-    try:
-        if relative:
-            make_package_relative(workdir, spec, allow_root)
-        elif not allow_root:
-            ensure_package_relocatable(workdir, binaries_dir)
-    except Exception as e:
-        shutil.rmtree(workdir)
-        shutil.rmtree(tarfile_dir)
-        shutil.rmtree(tmpdir)
-        tty.die(e)
+    if relative:
+        make_package_relative(workdir, spec, buildinfo, allow_root)
+    elif not allow_root:
+        ensure_package_relocatable(buildinfo, binaries_dir)

-    # create gzip compressed tarball of the install prefix
-    # On AMD Ryzen 3700X and an SSD disk, we have the following on compression speed:
-    # compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB
-    # compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
-    # So we follow gzip.
-    with closing(tarfile.open(tarfile_path, "w:gz", compresslevel=6)) as tar:
-        tar.add(name=binaries_dir, arcname=pkg_dir)
-        if not relative:
-            # Add buildinfo file
-            buildinfo_path = buildinfo_file_name(workdir)
-            buildinfo_arcname = buildinfo_file_name(pkg_dir)
-            tar.add(name=buildinfo_path, arcname=buildinfo_arcname)
+    _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo)

     # remove copy of install directory
-    shutil.rmtree(workdir)
+    if relative:
+        shutil.rmtree(workdir)

     # get the sha256 checksum of the tarball
     checksum = checksum_tarball(tarfile_path)
@@ -1275,7 +1343,11 @@ def _build_tarball(
     spec_dict["buildinfo"] = buildinfo

     with open(specfile_path, "w") as outfile:
-        outfile.write(sjson.dump(spec_dict))
+        # Note: when using gpg clear sign, we need to avoid long lines (19995 chars).
+        # If lines are longer, they are truncated without error. Thanks GPG!
+        # So, here we still add newlines, but no indent, to save on file size and
+        # line length.
+        json.dump(spec_dict, outfile, indent=0, separators=(",", ":"))

     # sign the tarball and spec file with gpg
     if not unsigned:
@@ -1292,18 +1364,15 @@ def _build_tarball(

     tty.debug('Buildcache for "{0}" written to \n {1}'.format(spec, remote_spackfile_path))

-    try:
-        # push the key to the build cache's _pgp directory so it can be
-        # imported
-        if not unsigned:
-            push_keys(out_url, keys=[key], regenerate_index=regenerate_index, tmpdir=tmpdir)
+    # push the key to the build cache's _pgp directory so it can be
+    # imported
+    if not unsigned:
+        push_keys(out_url, keys=[key], regenerate_index=regenerate_index, tmpdir=stage_dir)

-        # create an index.json for the build_cache directory so specs can be
-        # found
-        if regenerate_index:
-            generate_package_index(url_util.join(out_url, os.path.relpath(cache_prefix, tmpdir)))
-    finally:
-        shutil.rmtree(tmpdir)
+    # create an index.json for the build_cache directory so specs can be
+    # found
+    if regenerate_index:
+        generate_package_index(url_util.join(out_url, os.path.relpath(cache_prefix, stage_dir)))

     return None

@@ -1536,13 +1605,12 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
     return None


-def make_package_relative(workdir, spec, allow_root):
+def make_package_relative(workdir, spec, buildinfo, allow_root):
     """
     Change paths in binaries to relative paths. Change absolute symlinks
     to relative symlinks.
     """
     prefix = spec.prefix
-    buildinfo = read_buildinfo_file(workdir)
     old_layout_root = buildinfo["buildpath"]
     orig_path_names = list()
     cur_path_names = list()
@@ -1566,9 +1634,8 @@ def make_package_relative(workdir, spec, allow_root):
     relocate.make_link_relative(cur_path_names, orig_path_names)


-def ensure_package_relocatable(workdir, binaries_dir):
+def ensure_package_relocatable(buildinfo, binaries_dir):
     """Check if package binaries are relocatable."""
-    buildinfo = read_buildinfo_file(workdir)
     binaries = [os.path.join(binaries_dir, f) for f in buildinfo["relocate_binaries"]]
     relocate.ensure_binaries_are_relocatable(binaries)

@@ -208,7 +208,7 @@ def _install_and_test(self, abstract_spec, bincache_platform, bincache_data, test_fn):
             # This will be None for things that don't depend on python
             python_spec = item.get("python", None)
             # Skip specs which are not compatible
-            if not abstract_spec.satisfies(candidate_spec):
+            if not abstract_spec.intersects(candidate_spec):
                 continue

             if python_spec is not None and python_spec not in abstract_spec:

lib/spack/spack/build_environment.py

@@ -69,13 +69,13 @@
 from spack.installer import InstallError
 from spack.util.cpus import cpus_available
 from spack.util.environment import (
+    SYSTEM_DIRS,
     EnvironmentModifications,
     env_flag,
     filter_system_paths,
     get_path,
     inspect_path,
     is_system_path,
-    system_dirs,
     validate,
 )
 from spack.util.executable import Executable
@@ -397,7 +397,7 @@ def set_compiler_environment_variables(pkg, env):

     env.set("SPACK_COMPILER_SPEC", str(spec.compiler))

-    env.set("SPACK_SYSTEM_DIRS", ":".join(system_dirs))
+    env.set("SPACK_SYSTEM_DIRS", ":".join(SYSTEM_DIRS))

     compiler.setup_custom_environment(pkg, env)

@@ -485,7 +485,13 @@ def update_compiler_args_for_dep(dep):
         query = pkg.spec[dep.name]
         dep_link_dirs = list()
         try:
+            # In some circumstances (particularly for externals) finding
+            # libraries for packages can be time consuming, so indicate that
+            # we are performing this operation (and also report when it
+            # finishes).
+            tty.debug("Collecting libraries for {0}".format(dep.name))
             dep_link_dirs.extend(query.libs.directories)
+            tty.debug("Libraries for {0} have been collected.".format(dep.name))
         except NoLibrariesError:
             tty.debug("No libraries found for {0}".format(dep.name))

@@ -772,7 +778,9 @@ def setup_package(pkg, dirty, context="build"):
     set_compiler_environment_variables(pkg, env_mods)
     set_wrapper_variables(pkg, env_mods)

+    tty.debug("setup_package: grabbing modifications from dependencies")
     env_mods.extend(modifications_from_dependencies(pkg.spec, context, custom_mods_only=False))
+    tty.debug("setup_package: collected all modifications from dependencies")

     # architecture specific setup
     platform = spack.platforms.by_name(pkg.spec.architecture.platform)
@@ -780,6 +788,7 @@ def setup_package(pkg, dirty, context="build"):
     platform.setup_platform_environment(pkg, env_mods)

     if context == "build":
+        tty.debug("setup_package: setup build environment for root")
         builder = spack.builder.create(pkg)
         builder.setup_build_environment(env_mods)

@@ -790,6 +799,7 @@ def setup_package(pkg, dirty, context="build"):
                 " includes and omit it when invoked with '--cflags'."
             )
     elif context == "test":
+        tty.debug("setup_package: setup test environment for root")
         env_mods.extend(
             inspect_path(
                 pkg.spec.prefix,
@@ -806,6 +816,7 @@ def setup_package(pkg, dirty, context="build"):
     # Load modules on an already clean environment, just before applying Spack's
     # own environment modifications. This ensures Spack controls CC/CXX/... variables.
     if need_compiler:
+        tty.debug("setup_package: loading compiler modules")
         for mod in pkg.compiler.modules:
             load_module(mod)

@@ -943,6 +954,7 @@ def default_modifications_for_dep(dep):
         _make_runnable(dep, env)

     def add_modifications_for_dep(dep):
+        tty.debug("Adding env modifications for {0}".format(dep.name))
         # Some callers of this function only want the custom modifications.
         # For callers that want both custom and default modifications, we want
         # to perform the default modifications here (this groups custom
@@ -968,6 +980,7 @@ def add_modifications_for_dep(dep):
             builder.setup_dependent_build_environment(env, spec)
         else:
             dpkg.setup_dependent_run_environment(env, spec)
+        tty.debug("Added env modifications for {0}".format(dep.name))

     # Note that we want to perform environment modifications in a fixed order.
     # The Spec.traverse method provides this: i.e. in addition to
@@ -8,7 +8,7 @@
|
||||
import platform
|
||||
import re
|
||||
import sys
|
||||
from typing import List, Tuple
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
import llnl.util.filesystem as fs

@@ -16,7 +16,7 @@
import spack.builder
import spack.package_base
import spack.util.path
from spack.directives import build_system, depends_on, variant
from spack.directives import build_system, conflicts, depends_on, variant
from spack.multimethod import when

from ._checks import BaseBuilder, execute_build_time_tests

@@ -35,6 +35,43 @@ def _extract_primary_generator(generator):
    return primary_generator


def generator(*names: str, default: Optional[str] = None):
    """The build system generator to use.

    See ``cmake --help`` for a list of valid generators.
    Currently, "Unix Makefiles" and "Ninja" are the only generators
    that Spack supports. Defaults to "Unix Makefiles".

    See https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html
    for more information.

    Args:
        names: allowed generators for this package
        default: default generator
    """
    allowed_values = ("make", "ninja")
    if any(x not in allowed_values for x in names):
        msg = "only 'make' and 'ninja' are allowed for CMake's 'generator' directive"
        raise ValueError(msg)

    default = default or names[0]
    not_used = [x for x in allowed_values if x not in names]

    def _values(x):
        return x in allowed_values

    _values.__doc__ = f"{','.join(names)}"

    variant(
        "generator",
        default=default,
        values=_values,
        description="the build system generator to use",
    )
    for x in not_used:
        conflicts(f"generator={x}")


class CMakePackage(spack.package_base.PackageBase):
    """Specialized class for packages built using CMake

@@ -67,8 +104,15 @@ class CMakePackage(spack.package_base.PackageBase):
        when="^cmake@3.9:",
        description="CMake interprocedural optimization",
    )

    if sys.platform == "win32":
        generator("ninja")
    else:
        generator("ninja", "make", default="make")

    depends_on("cmake", type="build")
    depends_on("ninja", type="build", when="platform=windows")
    depends_on("gmake", type="build", when="generator=make")
    depends_on("ninja", type="build", when="generator=ninja")

    def flags_to_build_system_args(self, flags):
        """Return a list of all command line arguments to pass the specified

@@ -138,18 +182,6 @@ class CMakeBuilder(BaseBuilder):
    | :py:meth:`~.CMakeBuilder.build_directory`     | Directory where to |
    |                                               | build the package  |
    +-----------------------------------------------+--------------------+

    The generator used by CMake can be specified by providing the ``generator``
    attribute. Per
    https://cmake.org/cmake/help/git-master/manual/cmake-generators.7.html,
    the format is: [<secondary-generator> - ]<primary_generator>.

    The full list of primary and secondary generators supported by CMake may be found
    in the documentation for the version of CMake used; however, at this time Spack
    supports only the primary generators "Unix Makefiles" and "Ninja." Spack's CMake
    support is agnostic with respect to primary generators. Spack will generate a
    runtime error if the generator string does not follow the prescribed format, or if
    the primary generator is not supported.
    """

    #: Phases of a CMake package
@@ -160,7 +192,6 @@ class CMakeBuilder(BaseBuilder):

    #: Names associated with package attributes in the old build-system format
    legacy_attributes: Tuple[str, ...] = (
        "generator",
        "build_targets",
        "install_targets",
        "build_time_test_callbacks",
@@ -171,16 +202,6 @@ class CMakeBuilder(BaseBuilder):
        "build_directory",
    )

    #: The build system generator to use.
    #:
    #: See ``cmake --help`` for a list of valid generators.
    #: Currently, "Unix Makefiles" and "Ninja" are the only generators
    #: that Spack supports. Defaults to "Unix Makefiles".
    #:
    #: See https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html
    #: for more information.
    generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"

    #: Targets to be used during the build phase
    build_targets: List[str] = []
    #: Targets to be used during the install phase
@@ -202,12 +223,20 @@ def root_cmakelists_dir(self):
        """
        return self.pkg.stage.source_path

    @property
    def generator(self):
        if self.spec.satisfies("generator=make"):
            return "Unix Makefiles"
        if self.spec.satisfies("generator=ninja"):
            return "Ninja"
        msg = f'{self.spec.format()} has an unsupported value for the "generator" variant'
        raise ValueError(msg)

    @property
    def std_cmake_args(self):
        """Standard cmake arguments provided as a property for
        convenience of package writers
        """
        # standard CMake arguments
        std_cmake_args = CMakeBuilder.std_args(self.pkg, generator=self.generator)
        std_cmake_args += getattr(self.pkg, "cmake_flag_args", [])
        return std_cmake_args
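For illustration, a minimal sketch of a package opting into the new generator directive. The package name, URL, and checksum are hypothetical, and the directive is assumed to be re-exported through spack.package like the other build-system helpers:

from spack.package import *


class Hello(CMakePackage):
    """Hypothetical package that allows both generators."""

    homepage = "https://example.com/hello"
    url = "https://example.com/hello-1.0.tar.gz"

    version("1.0", sha256="0" * 64)  # placeholder checksum

    # Adds a "generator" variant with values ninja/make, defaulting to
    # ninja, plus conflicts("generator=...") for any disallowed value.
    generator("ninja", "make", default="ninja")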
@@ -38,6 +38,7 @@
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web as web_util
from spack import traverse
from spack.error import SpackError
from spack.reporters import CDash, CDashConfiguration
from spack.reporters.cdash import build_stamp as cdash_build_stamp
@@ -361,60 +362,7 @@ def append_dep(s, d):


def _spec_matches(spec, match_string):
    return spec.satisfies(match_string)


def _remove_attributes(src_dict, dest_dict):
    if "tags" in src_dict and "tags" in dest_dict:
        # For 'tags', we remove any tags that are listed for removal
        for tag in src_dict["tags"]:
            while tag in dest_dict["tags"]:
                dest_dict["tags"].remove(tag)


def _copy_attributes(attrs_list, src_dict, dest_dict):
    for runner_attr in attrs_list:
        if runner_attr in src_dict:
            if runner_attr in dest_dict and runner_attr == "tags":
                # For 'tags', we combine the lists of tags, while
                # avoiding duplicates
                for tag in src_dict[runner_attr]:
                    if tag not in dest_dict[runner_attr]:
                        dest_dict[runner_attr].append(tag)
            elif runner_attr in dest_dict and runner_attr == "variables":
                # For 'variables', we merge the dictionaries. Any conflicts
                # (i.e. 'runner-attributes' has same variable key as the
                # higher level) we resolve by keeping the more specific
                # 'runner-attributes' version.
                for src_key, src_val in src_dict[runner_attr].items():
                    dest_dict[runner_attr][src_key] = copy.deepcopy(src_dict[runner_attr][src_key])
            else:
                dest_dict[runner_attr] = copy.deepcopy(src_dict[runner_attr])


def _find_matching_config(spec, gitlab_ci):
    runner_attributes = {}
    overridable_attrs = ["image", "tags", "variables", "before_script", "script", "after_script"]

    _copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)

    matched = False
    only_first = gitlab_ci.get("match_behavior", "first") == "first"
    for ci_mapping in gitlab_ci["mappings"]:
        for match_string in ci_mapping["match"]:
            if _spec_matches(spec, match_string):
                matched = True
                if "remove-attributes" in ci_mapping:
                    _remove_attributes(ci_mapping["remove-attributes"], runner_attributes)
                if "runner-attributes" in ci_mapping:
                    _copy_attributes(
                        overridable_attrs, ci_mapping["runner-attributes"], runner_attributes
                    )
                break
        if matched and only_first:
            break

    return runner_attributes if matched else None
    return spec.intersects(match_string)


def _format_job_needs(
@@ -490,16 +438,28 @@ def compute_affected_packages(rev1="HEAD^", rev2="HEAD"):
    return spack.repo.get_all_package_diffs("ARC", rev1=rev1, rev2=rev2)


def get_spec_filter_list(env, affected_pkgs):
def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
    """Given a list of package names and an active/concretized
    environment, return the set of all concrete specs from the
    environment that could have been affected by changing the
    list of packages.

    If a ``dependent_traverse_depth`` is given, it is used to limit
    upward (in the parent direction) traversal of specs of touched
    packages. E.g. if 1 is provided, then only direct dependents
    of touched package specs are traversed to produce specs that
    could have been affected by changing the package, while if 0 is
    provided, only the changed specs themselves are traversed. If ``None``
    is given, upward traversal of touched package specs is done all
    the way to the environment roots. Providing a negative number
    results in no traversals at all, yielding an empty set.

    Arguments:

        env (spack.environment.Environment): Active concrete environment
        affected_pkgs (List[str]): Affected package names
        dependent_traverse_depth: Optional integer to limit dependent
            traversal, or None to disable the limit.

    Returns:
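A sketch of the depth semantics described in this docstring, assuming an active, concretized environment containing zlib:

# dependent_traverse_depth=0: walk only the touched specs themselves
# (each visited node still contributes its dependencies to the set)
affected = get_spec_filter_list(env, ["zlib"], dependent_traverse_depth=0)

# dependent_traverse_depth=1: also walk direct dependents of zlib
affected = get_spec_filter_list(env, ["zlib"], dependent_traverse_depth=1)

# default None: walk dependents all the way up to the environment roots
affected = get_spec_filter_list(env, ["zlib"])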
@@ -512,17 +472,237 @@ def get_spec_filter_list(env, affected_pkgs):
    tty.debug("All concrete environment specs:")
    for s in all_concrete_specs:
        tty.debug(" {0}/{1}".format(s.name, s.dag_hash()[:7]))
    env_matches = [s for s in all_concrete_specs if s.name in frozenset(affected_pkgs)]
    affected_pkgs = frozenset(affected_pkgs)
    env_matches = [s for s in all_concrete_specs if s.name in affected_pkgs]
    visited = set()
    dag_hash = lambda s: s.dag_hash()
    for match in env_matches:
        for parent in match.traverse(direction="parents", key=dag_hash):
            affected_specs.update(
                parent.traverse(direction="children", visited=visited, key=dag_hash)
            )
    for depth, parent in traverse.traverse_nodes(
        env_matches, direction="parents", key=dag_hash, depth=True, order="breadth"
    ):
        if dependent_traverse_depth is not None and depth > dependent_traverse_depth:
            break
        affected_specs.update(parent.traverse(direction="children", visited=visited, key=dag_hash))
    return affected_specs


def _build_jobs(phases, staged_phases):
    for phase in phases:
        phase_name = phase["name"]
        spec_labels, dependencies, stages = staged_phases[phase_name]

        for stage_jobs in stages:
            for spec_label in stage_jobs:
                spec_record = spec_labels[spec_label]
                release_spec = spec_record["spec"]
                release_spec_dag_hash = release_spec.dag_hash()
                yield release_spec, release_spec_dag_hash


def _noop(x):
    return x


def _unpack_script(script_section, op=_noop):
    script = []
    for cmd in script_section:
        if isinstance(cmd, list):
            for subcmd in cmd:
                script.append(op(subcmd))
        else:
            script.append(op(cmd))

    return script
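The helper flattens one level of nesting and maps op over every command; a small sketch of its behavior (the paths here are illustrative):

script = _unpack_script(
    ["cd {env_dir}", ["spack env activate --without-view .", "spack ci rebuild"]],
    op=lambda cmd: cmd.replace("{env_dir}", "/builds/concrete_env"),
)
assert script == [
    "cd /builds/concrete_env",
    "spack env activate --without-view .",
    "spack ci rebuild",
]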
class SpackCI:
    """Spack CI object used to generate intermediate representation
    used by the CI generator(s).
    """

    def __init__(self, ci_config, phases, staged_phases):
        """Given the information from the ci section of the config
        and the job phases, set up the metadata needed for generating
        Spack CI IR.
        """

        self.ci_config = ci_config
        self.named_jobs = ["any", "build", "cleanup", "noop", "reindex", "signing"]

        self.ir = {
            "jobs": {},
            "temporary-storage-url-prefix": self.ci_config.get(
                "temporary-storage-url-prefix", None
            ),
            "enable-artifacts-buildcache": self.ci_config.get(
                "enable-artifacts-buildcache", False
            ),
            "bootstrap": self.ci_config.get(
                "bootstrap", []
            ),  # This is deprecated and should be removed
            "rebuild-index": self.ci_config.get("rebuild-index", True),
            "broken-specs-url": self.ci_config.get("broken-specs-url", None),
            "broken-tests-packages": self.ci_config.get("broken-tests-packages", []),
            "target": self.ci_config.get("target", "gitlab"),
        }
        jobs = self.ir["jobs"]

        for spec, dag_hash in _build_jobs(phases, staged_phases):
            jobs[dag_hash] = self.__init_job(spec)

        for name in self.named_jobs:
            # Skip the special named jobs
            if name not in ["any", "build"]:
                jobs[name] = self.__init_job("")

    def __init_job(self, spec):
        """Initialize job object"""
        return {"spec": spec, "attributes": {}}

    def __is_named(self, section):
        """Check if a pipeline-gen configuration section is for a named job,
        and if so return the name, otherwise return None.
        """
        for _name in self.named_jobs:
            keys = ["{0}-job".format(_name), "{0}-job-remove".format(_name)]
            if any([key for key in keys if key in section]):
                return _name

        return None

    @staticmethod
    def __job_name(name, suffix=""):
        """Compute the name of a named job with an appropriate suffix.
        Valid suffixes are '-remove', the empty string, or None.
        """
        assert type(name) == str

        jname = name
        if suffix:
            jname = "{0}-job{1}".format(name, suffix)
        else:
            jname = "{0}-job".format(name)

        return jname

    def __apply_submapping(self, dest, spec, section):
        """Apply submapping section to the IR dict"""
        matched = False
        only_first = section.get("match_behavior", "first") == "first"

        for match_attrs in reversed(section["submapping"]):
            attrs = cfg.InternalConfigScope._process_dict_keyname_overrides(match_attrs)
            for match_string in match_attrs["match"]:
                if _spec_matches(spec, match_string):
                    matched = True
                    if "build-job-remove" in match_attrs:
                        spack.config.remove_yaml(dest, attrs["build-job-remove"])
                    if "build-job" in match_attrs:
                        spack.config.merge_yaml(dest, attrs["build-job"])
                    break
            if matched and only_first:
                break

        return dest

    # Generate IR from the configs
    def generate_ir(self):
        """Generate the IR from the Spack CI configurations."""

        jobs = self.ir["jobs"]

        # Implicit job defaults
        defaults = [
            {
                "build-job": {
                    "script": [
                        "cd {env_dir}",
                        "spack env activate --without-view .",
                        "spack ci rebuild",
                    ]
                }
            },
            {"noop-job": {"script": ['echo "All specs already up to date, nothing to rebuild."']}},
        ]

        # Job overrides
        overrides = [
            # Reindex script
            {
                "reindex-job": {
                    "script:": [
                        "spack buildcache update-index --keys --mirror-url {index_target_mirror}"
                    ]
                }
            },
            # Cleanup script
            {
                "cleanup-job": {
                    "script:": [
                        "spack -d mirror destroy --mirror-url {mirror_prefix}/$CI_PIPELINE_ID"
                    ]
                }
            },
            # Add signing job tags
            {"signing-job": {"tags": ["aws", "protected", "notary"]}},
            # Remove reserved tags
            {"any-job-remove": {"tags": SPACK_RESERVED_TAGS}},
        ]

        pipeline_gen = overrides + self.ci_config.get("pipeline-gen", []) + defaults

        for section in reversed(pipeline_gen):
            name = self.__is_named(section)
            has_submapping = "submapping" in section
            section = cfg.InternalConfigScope._process_dict_keyname_overrides(section)

            if name:
                remove_job_name = self.__job_name(name, suffix="-remove")
                merge_job_name = self.__job_name(name)
                do_remove = remove_job_name in section
                do_merge = merge_job_name in section

                def _apply_section(dest, src):
                    if do_remove:
                        dest = spack.config.remove_yaml(dest, src[remove_job_name])
                    if do_merge:
                        dest = copy.copy(spack.config.merge_yaml(dest, src[merge_job_name]))

                if name == "build":
                    # Apply attributes to all build jobs
                    for _, job in jobs.items():
                        if job["spec"]:
                            _apply_section(job["attributes"], section)
                elif name == "any":
                    # Apply section attributes to all jobs
                    for _, job in jobs.items():
                        _apply_section(job["attributes"], section)
                else:
                    # Create a signing job if there is a script and the job
                    # hasn't been initialized yet
                    if name == "signing" and name not in jobs:
                        if "signing-job" in section:
                            if "script" not in section["signing-job"]:
                                continue
                            else:
                                jobs[name] = self.__init_job("")
                    # Apply attributes to named job
                    _apply_section(jobs[name]["attributes"], section)

            elif has_submapping:
                # Apply section jobs with specs to match
                for _, job in jobs.items():
                    if job["spec"]:
                        job["attributes"] = self.__apply_submapping(
                            job["attributes"], job["spec"], section
                        )

        for _, job in jobs.items():
            if job["spec"]:
                job["spec"] = job["spec"].name

        return self.ir


def generate_gitlab_ci_yaml(
    env,
    print_summary,
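The IR returned by generate_ir is keyed by job: build jobs under their DAG hash, named service jobs under their names. Roughly (keys per the constructor above, values illustrative):

ir = {
    "jobs": {
        "abcdef1...": {"spec": "zlib", "attributes": {"tags": ["spack", "x86_64"]}},
        "noop": {"spec": "", "attributes": {"script": ["echo ..."]}},
        "cleanup": {"spec": "", "attributes": {}},
    },
    "rebuild-index": True,
    "target": "gitlab",
}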
@@ -572,14 +752,32 @@ def generate_gitlab_ci_yaml(

    yaml_root = ev.config_dict(env.yaml)

    if "gitlab-ci" not in yaml_root:
        tty.die('Environment yaml does not have "gitlab-ci" section')
    # Get the joined "ci" config with all of the current scopes resolved
    ci_config = cfg.get("ci")

    gitlab_ci = yaml_root["gitlab-ci"]
    if not ci_config:
        tty.die('Environment yaml does not have "ci" section')

    cdash_handler = CDashHandler(yaml_root.get("cdash")) if "cdash" in yaml_root else None
    # Default target is gitlab...and only target is gitlab
    if "target" in ci_config and ci_config["target"] != "gitlab":
        tty.die('Spack CI module only generates target "gitlab"')

    cdash_config = cfg.get("cdash")
    cdash_handler = CDashHandler(cdash_config) if "build-group" in cdash_config else None
    build_group = cdash_handler.build_group if cdash_handler else None

    dependent_depth = os.environ.get("SPACK_PRUNE_UNTOUCHED_DEPENDENT_DEPTH", None)
    if dependent_depth is not None:
        try:
            dependent_depth = int(dependent_depth)
        except (TypeError, ValueError):
            tty.warn(
                f"Unrecognized value ({dependent_depth}) "
                "provided for SPACK_PRUNE_UNTOUCHED_DEPENDENT_DEPTH, "
                "ignoring it."
            )
            dependent_depth = None

    prune_untouched_packages = False
    spack_prune_untouched = os.environ.get("SPACK_PRUNE_UNTOUCHED", None)
    if spack_prune_untouched is not None and spack_prune_untouched.lower() == "true":
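How the two pruning knobs combine in practice (a sketch; both are read from the generator job's environment):

import os

os.environ["SPACK_PRUNE_UNTOUCHED"] = "True"
os.environ["SPACK_PRUNE_UNTOUCHED_DEPENDENT_DEPTH"] = "1"
# generate_gitlab_ci_yaml(...) now prunes jobs for untouched specs,
# keeping touched packages and their direct dependents; a non-integer
# depth is warned about and treated as unlimited.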
@@ -595,7 +793,9 @@ def generate_gitlab_ci_yaml(
        tty.debug("affected pkgs:")
        for p in affected_pkgs:
            tty.debug(" {0}".format(p))
        affected_specs = get_spec_filter_list(env, affected_pkgs)
        affected_specs = get_spec_filter_list(
            env, affected_pkgs, dependent_traverse_depth=dependent_depth
        )
        tty.debug("all affected specs:")
        for s in affected_specs:
            tty.debug(" {0}/{1}".format(s.name, s.dag_hash()[:7]))
@@ -637,25 +837,25 @@ def generate_gitlab_ci_yaml(
    # trying to build.
    broken_specs_url = ""
    known_broken_specs_encountered = []
    if "broken-specs-url" in gitlab_ci:
        broken_specs_url = gitlab_ci["broken-specs-url"]
    if "broken-specs-url" in ci_config:
        broken_specs_url = ci_config["broken-specs-url"]

    enable_artifacts_buildcache = False
    if "enable-artifacts-buildcache" in gitlab_ci:
        enable_artifacts_buildcache = gitlab_ci["enable-artifacts-buildcache"]
    if "enable-artifacts-buildcache" in ci_config:
        enable_artifacts_buildcache = ci_config["enable-artifacts-buildcache"]

    rebuild_index_enabled = True
    if "rebuild-index" in gitlab_ci and gitlab_ci["rebuild-index"] is False:
    if "rebuild-index" in ci_config and ci_config["rebuild-index"] is False:
        rebuild_index_enabled = False

    temp_storage_url_prefix = None
    if "temporary-storage-url-prefix" in gitlab_ci:
        temp_storage_url_prefix = gitlab_ci["temporary-storage-url-prefix"]
    if "temporary-storage-url-prefix" in ci_config:
        temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]

    bootstrap_specs = []
    phases = []
    if "bootstrap" in gitlab_ci:
        for phase in gitlab_ci["bootstrap"]:
    if "bootstrap" in ci_config:
        for phase in ci_config["bootstrap"]:
            try:
                phase_name = phase.get("name")
                strip_compilers = phase.get("compiler-agnostic")
@@ -720,6 +920,27 @@ def generate_gitlab_ci_yaml(
        shutil.copyfile(env.manifest_path, os.path.join(concrete_env_dir, "spack.yaml"))
        shutil.copyfile(env.lock_path, os.path.join(concrete_env_dir, "spack.lock"))

        with open(env.manifest_path, "r") as env_fd:
            env_yaml_root = syaml.load(env_fd)
            # Add config scopes to environment
            env_includes = env_yaml_root["spack"].get("include", [])
            cli_scopes = [
                os.path.abspath(s.path)
                for s in cfg.scopes().values()
                if type(s) == cfg.ImmutableConfigScope
                and s.path not in env_includes
                and os.path.exists(s.path)
            ]
            include_scopes = []
            for scope in cli_scopes:
                if scope not in include_scopes and scope not in env_includes:
                    include_scopes.insert(0, scope)
            env_includes.extend(include_scopes)
            env_yaml_root["spack"]["include"] = env_includes

            with open(os.path.join(concrete_env_dir, "spack.yaml"), "w") as fd:
                fd.write(syaml.dump_config(env_yaml_root, default_flow_style=False))

    job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
    job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
    job_test_dir = os.path.join(pipeline_artifacts_dir, "tests")
@@ -731,7 +952,7 @@ def generate_gitlab_ci_yaml(
    # generation job and the rebuild jobs. This can happen when gitlab
    # checks out the project into a runner-specific directory, for example,
    # and different runners are picked for generate and rebuild jobs.
    ci_project_dir = os.environ.get("CI_PROJECT_DIR")
    ci_project_dir = os.environ.get("CI_PROJECT_DIR", os.getcwd())
    rel_artifacts_root = os.path.relpath(pipeline_artifacts_dir, ci_project_dir)
    rel_concrete_env_dir = os.path.relpath(concrete_env_dir, ci_project_dir)
    rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
@@ -745,7 +966,7 @@ def generate_gitlab_ci_yaml(
    try:
        bindist.binary_index.update()
    except bindist.FetchCacheError as e:
        tty.error(e)
        tty.warn(e)

    staged_phases = {}
    try:
@@ -802,6 +1023,9 @@ def generate_gitlab_ci_yaml(
    else:
        broken_spec_urls = web_util.list_url(broken_specs_url)

    spack_ci = SpackCI(ci_config, phases, staged_phases)
    spack_ci_ir = spack_ci.generate_ir()

    before_script, after_script = None, None
    for phase in phases:
        phase_name = phase["name"]
@@ -829,7 +1053,7 @@ def generate_gitlab_ci_yaml(
                    spec_record["needs_rebuild"] = False
                    continue

            runner_attribs = _find_matching_config(release_spec, gitlab_ci)
            runner_attribs = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]

            if not runner_attribs:
                tty.warn("No match found for {0}, skipping it".format(release_spec))
@@ -860,23 +1084,21 @@ def generate_gitlab_ci_yaml(
                except AttributeError:
                    image_name = build_image

            job_script = ["spack env activate --without-view ."]
            if "script" not in runner_attribs:
                raise AttributeError

            if artifacts_root:
                job_script.insert(0, "cd {0}".format(concrete_env_dir))
            def main_script_replacements(cmd):
                return cmd.replace("{env_dir}", concrete_env_dir)

            job_script.extend(["spack ci rebuild"])

            if "script" in runner_attribs:
                job_script = [s for s in runner_attribs["script"]]
            job_script = _unpack_script(runner_attribs["script"], op=main_script_replacements)

            before_script = None
            if "before_script" in runner_attribs:
                before_script = [s for s in runner_attribs["before_script"]]
                before_script = _unpack_script(runner_attribs["before_script"])

            after_script = None
            if "after_script" in runner_attribs:
                after_script = [s for s in runner_attribs["after_script"]]
                after_script = _unpack_script(runner_attribs["after_script"])

            osname = str(release_spec.architecture)
            job_name = get_job_name(
@@ -938,7 +1160,7 @@ def generate_gitlab_ci_yaml(
                        bs_arch = c_spec.architecture
                        bs_arch_family = bs_arch.target.microarchitecture.family
                        if (
                            c_spec.satisfies(compiler_pkg_spec)
                            c_spec.intersects(compiler_pkg_spec)
                            and bs_arch_family == spec_arch_family
                        ):
                            # We found the bootstrap compiler this release spec
@@ -1120,19 +1342,6 @@ def generate_gitlab_ci_yaml(
        else:
            tty.warn("Unable to populate buildgroup without CDash credentials")

    service_job_config = None
    if "service-job-attributes" in gitlab_ci:
        service_job_config = gitlab_ci["service-job-attributes"]

    default_attrs = [
        "image",
        "tags",
        "variables",
        "before_script",
        # 'script',
        "after_script",
    ]

    service_job_retries = {
        "max": 2,
        "when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
@@ -1144,55 +1353,29 @@ def generate_gitlab_ci_yaml(
        # schedule a job to clean up the temporary storage location
        # associated with this pipeline.
        stage_names.append("cleanup-temp-storage")
        cleanup_job = {}

        if service_job_config:
            _copy_attributes(default_attrs, service_job_config, cleanup_job)

        if "tags" in cleanup_job:
            service_tags = _remove_reserved_tags(cleanup_job["tags"])
            cleanup_job["tags"] = service_tags
        cleanup_job = copy.deepcopy(spack_ci_ir["jobs"]["cleanup"]["attributes"])

        cleanup_job["stage"] = "cleanup-temp-storage"
        cleanup_job["script"] = [
            "spack -d mirror destroy --mirror-url {0}/$CI_PIPELINE_ID".format(
                temp_storage_url_prefix
            )
        ]
        cleanup_job["when"] = "always"
        cleanup_job["retry"] = service_job_retries
        cleanup_job["interruptible"] = True

        cleanup_job["script"] = _unpack_script(
            cleanup_job["script"],
            op=lambda cmd: cmd.replace("mirror_prefix", temp_storage_url_prefix),
        )

        output_object["cleanup"] = cleanup_job

    if (
        "signing-job-attributes" in gitlab_ci
        "script" in spack_ci_ir["jobs"]["signing"]["attributes"]
        and spack_pipeline_type == "spack_protected_branch"
    ):
        # External signing: generate a job to check and sign binary pkgs
        stage_names.append("stage-sign-pkgs")
        signing_job_config = gitlab_ci["signing-job-attributes"]
        signing_job = {}
        signing_job = spack_ci_ir["jobs"]["signing"]["attributes"]

        signing_job_attrs_to_copy = [
            "image",
            "tags",
            "variables",
            "before_script",
            "script",
            "after_script",
        ]

        _copy_attributes(signing_job_attrs_to_copy, signing_job_config, signing_job)

        signing_job_tags = []
        if "tags" in signing_job:
            signing_job_tags = _remove_reserved_tags(signing_job["tags"])

        for tag in ["aws", "protected", "notary"]:
            if tag not in signing_job_tags:
                signing_job_tags.append(tag)
        signing_job["tags"] = signing_job_tags
        signing_job["script"] = _unpack_script(signing_job["script"])

        signing_job["stage"] = "stage-sign-pkgs"
        signing_job["when"] = "always"
@@ -1204,23 +1387,17 @@ def generate_gitlab_ci_yaml(
    if rebuild_index_enabled:
        # Add a final job to regenerate the index
        stage_names.append("stage-rebuild-index")
        final_job = {}

        if service_job_config:
            _copy_attributes(default_attrs, service_job_config, final_job)

        if "tags" in final_job:
            service_tags = _remove_reserved_tags(final_job["tags"])
            final_job["tags"] = service_tags
        final_job = spack_ci_ir["jobs"]["reindex"]["attributes"]

        index_target_mirror = mirror_urls[0]
        if remote_mirror_override:
            index_target_mirror = remote_mirror_override

        final_job["stage"] = "stage-rebuild-index"
        final_job["script"] = [
            "spack buildcache update-index --keys --mirror-url {0}".format(index_target_mirror)
        ]
        final_job["script"] = _unpack_script(
            final_job["script"],
            op=lambda cmd: cmd.replace("{index_target_mirror}", index_target_mirror),
        )

        final_job["when"] = "always"
        final_job["retry"] = service_job_retries
        final_job["interruptible"] = True
@@ -1301,13 +1478,7 @@ def generate_gitlab_ci_yaml(
    else:
        # No jobs were generated
        tty.debug("No specs to rebuild, generating no-op job")
        noop_job = {}

        if service_job_config:
            _copy_attributes(default_attrs, service_job_config, noop_job)

        if "script" not in noop_job:
            noop_job["script"] = ['echo "All specs already up to date, nothing to rebuild."']
        noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]

        noop_job["retry"] = service_job_retries

@@ -1321,7 +1492,7 @@ def generate_gitlab_ci_yaml(
        sys.exit(1)

    with open(output_file, "w") as outf:
        outf.write(syaml.dump_config(sorted_output, default_flow_style=True))
        outf.write(syaml.dump(sorted_output, default_flow_style=True))


def _url_encode_string(input_string):
@@ -1501,7 +1672,10 @@ def copy_files_to_artifacts(src, artifacts_dir):
    try:
        fs.copy(src, artifacts_dir)
    except Exception as err:
        tty.warn(f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to: {err}")
        msg = ("Unable to copy files ({0}) to artifacts {1} due to " "exception: {2}").format(
            src, artifacts_dir, str(err)
        )
        tty.warn(msg)


def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
@@ -1721,6 +1895,7 @@ def reproduce_ci_job(url, work_dir):
    function is a set of printed instructions for running docker and then
    commands to run to reproduce the build once inside the container.
    """
    work_dir = os.path.realpath(work_dir)
    download_and_extract_artifacts(url, work_dir)

    lock_file = fs.find(work_dir, "spack.lock")[0]
@@ -1885,7 +2060,9 @@ def reproduce_ci_job(url, work_dir):
    if job_image:
        inst_list.append("\nRun the following command:\n\n")
        inst_list.append(
            " $ docker run --rm -v {0}:{1} -ti {2}\n".format(work_dir, mount_as_dir, job_image)
            " $ docker run --rm --name spack_reproducer -v {0}:{1}:Z -ti {2}\n".format(
                work_dir, mount_as_dir, job_image
            )
        )
        inst_list.append("\nOnce inside the container:\n\n")
    else:
@@ -1936,13 +2113,16 @@ def process_command(name, commands, repro_dir):
    # Create a string [command 1] && [command 2] && ... && [command n] with commands
    # quoted using double quotes.
    args_to_string = lambda args: " ".join('"{}"'.format(arg) for arg in args)
    full_command = " && ".join(map(args_to_string, commands))
    full_command = " \n ".join(map(args_to_string, commands))

    # Write the command to a shell script
    script = "{0}.sh".format(name)
    with open(script, "w") as fd:
        fd.write("#!/bin/sh\n\n")
        fd.write("\n# spack {0} command\n".format(name))
        fd.write("set -e\n")
        if os.environ.get("SPACK_VERBOSE_SCRIPT"):
            fd.write("set -x\n")
        fd.write(full_command)
        fd.write("\n")
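Under the new scheme the reproduction script lists one quoted command per line and relies on set -e for the fail-fast behavior the old &&-chain provided; a sketch:

commands = [["spack", "env", "activate", "."], ["spack", "ci", "rebuild"]]
args_to_string = lambda args: " ".join('"{}"'.format(arg) for arg in args)
print(" \n ".join(map(args_to_string, commands)))
# "spack" "env" "activate" "."
#  "spack" "ci" "rebuild"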
@@ -498,11 +498,11 @@ def list_fn(args):

    if not args.allarch:
        arch = spack.spec.Spec.default_arch()
        specs = [s for s in specs if s.satisfies(arch)]
        specs = [s for s in specs if s.intersects(arch)]

    if args.specs:
        constraints = set(args.specs)
        specs = [s for s in specs if any(s.satisfies(c) for c in constraints)]
        specs = [s for s in specs if any(s.intersects(c) for c in constraints)]
    if sys.stdout.isatty():
        builds = len(specs)
        tty.msg("%s." % plural(builds, "cached build"))
@@ -33,12 +33,6 @@ def deindent(desc):
    return desc.replace("    ", "")


def get_env_var(variable_name):
    if variable_name in os.environ:
        return os.environ.get(variable_name)
    return None


def setup_parser(subparser):
    setup_parser.parser = subparser
    subparsers = subparser.add_subparsers(help="CI sub-commands")
@@ -255,10 +249,9 @@ def ci_rebuild(args):

    # Make sure the environment is "gitlab-enabled", or else there's nothing
    # to do.
    yaml_root = ev.config_dict(env.yaml)
    gitlab_ci = yaml_root["gitlab-ci"] if "gitlab-ci" in yaml_root else None
    if not gitlab_ci:
        tty.die("spack ci rebuild requires an env containing gitlab-ci cfg")
    ci_config = cfg.get("ci")
    if not ci_config:
        tty.die("spack ci rebuild requires an env containing ci cfg")

    tty.msg(
        "SPACK_BUILDCACHE_DESTINATION={0}".format(
@@ -269,27 +262,27 @@ def ci_rebuild(args):
    # Grab the environment variables we need. These either come from the
    # pipeline generation step ("spack ci generate"), where they were written
    # out as variables, or else provided by GitLab itself.
    pipeline_artifacts_dir = get_env_var("SPACK_ARTIFACTS_ROOT")
    job_log_dir = get_env_var("SPACK_JOB_LOG_DIR")
    job_test_dir = get_env_var("SPACK_JOB_TEST_DIR")
    repro_dir = get_env_var("SPACK_JOB_REPRO_DIR")
    local_mirror_dir = get_env_var("SPACK_LOCAL_MIRROR_DIR")
    concrete_env_dir = get_env_var("SPACK_CONCRETE_ENV_DIR")
    ci_pipeline_id = get_env_var("CI_PIPELINE_ID")
    ci_job_name = get_env_var("CI_JOB_NAME")
    signing_key = get_env_var("SPACK_SIGNING_KEY")
    job_spec_pkg_name = get_env_var("SPACK_JOB_SPEC_PKG_NAME")
    job_spec_dag_hash = get_env_var("SPACK_JOB_SPEC_DAG_HASH")
    compiler_action = get_env_var("SPACK_COMPILER_ACTION")
    spack_pipeline_type = get_env_var("SPACK_PIPELINE_TYPE")
    remote_mirror_override = get_env_var("SPACK_REMOTE_MIRROR_OVERRIDE")
    remote_mirror_url = get_env_var("SPACK_REMOTE_MIRROR_URL")
    spack_ci_stack_name = get_env_var("SPACK_CI_STACK_NAME")
    shared_pr_mirror_url = get_env_var("SPACK_CI_SHARED_PR_MIRROR_URL")
    rebuild_everything = get_env_var("SPACK_REBUILD_EVERYTHING")
    pipeline_artifacts_dir = os.environ.get("SPACK_ARTIFACTS_ROOT")
    job_log_dir = os.environ.get("SPACK_JOB_LOG_DIR")
    job_test_dir = os.environ.get("SPACK_JOB_TEST_DIR")
    repro_dir = os.environ.get("SPACK_JOB_REPRO_DIR")
    local_mirror_dir = os.environ.get("SPACK_LOCAL_MIRROR_DIR")
    concrete_env_dir = os.environ.get("SPACK_CONCRETE_ENV_DIR")
    ci_pipeline_id = os.environ.get("CI_PIPELINE_ID")
    ci_job_name = os.environ.get("CI_JOB_NAME")
    signing_key = os.environ.get("SPACK_SIGNING_KEY")
    job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME")
    job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH")
    compiler_action = os.environ.get("SPACK_COMPILER_ACTION")
    spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE")
    remote_mirror_override = os.environ.get("SPACK_REMOTE_MIRROR_OVERRIDE")
    remote_mirror_url = os.environ.get("SPACK_REMOTE_MIRROR_URL")
    spack_ci_stack_name = os.environ.get("SPACK_CI_STACK_NAME")
    shared_pr_mirror_url = os.environ.get("SPACK_CI_SHARED_PR_MIRROR_URL")
    rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING")

    # Construct absolute paths relative to current $CI_PROJECT_DIR
    ci_project_dir = get_env_var("CI_PROJECT_DIR")
    ci_project_dir = os.environ.get("CI_PROJECT_DIR")
    pipeline_artifacts_dir = os.path.join(ci_project_dir, pipeline_artifacts_dir)
    job_log_dir = os.path.join(ci_project_dir, job_log_dir)
    job_test_dir = os.path.join(ci_project_dir, job_test_dir)
@@ -306,8 +299,10 @@ def ci_rebuild(args):
    # Query the environment manifest to find out whether we're reporting to a
    # CDash instance, and if so, gather some information from the manifest to
    # support that task.
    cdash_handler = spack_ci.CDashHandler(yaml_root.get("cdash")) if "cdash" in yaml_root else None
    if cdash_handler:
    cdash_config = cfg.get("cdash")
    cdash_handler = None
    if "build-group" in cdash_config:
        cdash_handler = spack_ci.CDashHandler(cdash_config)
        tty.debug("cdash url = {0}".format(cdash_handler.url))
        tty.debug("cdash project = {0}".format(cdash_handler.project))
        tty.debug("cdash project_enc = {0}".format(cdash_handler.project_enc))
@@ -340,13 +335,13 @@ def ci_rebuild(args):
    pipeline_mirror_url = None

    temp_storage_url_prefix = None
    if "temporary-storage-url-prefix" in gitlab_ci:
        temp_storage_url_prefix = gitlab_ci["temporary-storage-url-prefix"]
    if "temporary-storage-url-prefix" in ci_config:
        temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
        pipeline_mirror_url = url_util.join(temp_storage_url_prefix, ci_pipeline_id)

    enable_artifacts_mirror = False
    if "enable-artifacts-buildcache" in gitlab_ci:
        enable_artifacts_mirror = gitlab_ci["enable-artifacts-buildcache"]
    if "enable-artifacts-buildcache" in ci_config:
        enable_artifacts_mirror = ci_config["enable-artifacts-buildcache"]
    if enable_artifacts_mirror or (
        spack_is_pr_pipeline and not enable_artifacts_mirror and not temp_storage_url_prefix
    ):
@@ -593,8 +588,8 @@ def ci_rebuild(args):
    # avoid wasting compute cycles attempting to build those hashes.
    if install_exit_code == INSTALL_FAIL_CODE and spack_is_develop_pipeline:
        tty.debug("Install failed on develop")
        if "broken-specs-url" in gitlab_ci:
            broken_specs_url = gitlab_ci["broken-specs-url"]
        if "broken-specs-url" in ci_config:
            broken_specs_url = ci_config["broken-specs-url"]
            dev_fail_hash = job_spec.dag_hash()
            broken_spec_path = url_util.join(broken_specs_url, dev_fail_hash)
            tty.msg("Reporting broken develop build as: {0}".format(broken_spec_path))
@@ -602,8 +597,8 @@ def ci_rebuild(args):
                broken_spec_path,
                job_spec_pkg_name,
                spack_ci_stack_name,
                get_env_var("CI_JOB_URL"),
                get_env_var("CI_PIPELINE_URL"),
                os.environ.get("CI_JOB_URL"),
                os.environ.get("CI_PIPELINE_URL"),
                job_spec.to_dict(hash=ht.dag_hash),
            )

@@ -615,17 +610,14 @@ def ci_rebuild(args):
    # the package, run them and copy the output. Failures of any kind should
    # *not* terminate the build process or preclude creating the build cache.
    broken_tests = (
        "broken-tests-packages" in gitlab_ci
        and job_spec.name in gitlab_ci["broken-tests-packages"]
        "broken-tests-packages" in ci_config
        and job_spec.name in ci_config["broken-tests-packages"]
    )
    reports_dir = fs.join_path(os.getcwd(), "cdash_report")
    if args.tests and broken_tests:
        tty.warn(
            "Unable to run stand-alone tests since listed in "
            "gitlab-ci's 'broken-tests-packages'"
        )
        tty.warn("Unable to run stand-alone tests since listed in " "ci's 'broken-tests-packages'")
        if cdash_handler:
            msg = "Package is listed in gitlab-ci's broken-tests-packages"
            msg = "Package is listed in ci's broken-tests-packages"
            cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
            cdash_handler.copy_test_results(reports_dir, job_test_dir)
    elif args.tests:
@@ -688,8 +680,8 @@ def ci_rebuild(args):

    # If this is a develop pipeline, check if the spec that we just built is
    # on the broken-specs list. If so, remove it.
    if spack_is_develop_pipeline and "broken-specs-url" in gitlab_ci:
        broken_specs_url = gitlab_ci["broken-specs-url"]
    if spack_is_develop_pipeline and "broken-specs-url" in ci_config:
        broken_specs_url = ci_config["broken-specs-url"]
        just_built_hash = job_spec.dag_hash()
        broken_spec_path = url_util.join(broken_specs_url, just_built_hash)
        if web_util.url_exists(broken_spec_path):
@@ -706,9 +698,9 @@ def ci_rebuild(args):
    else:
        tty.debug("spack install exited non-zero, will not create buildcache")

    api_root_url = get_env_var("CI_API_V4_URL")
    ci_project_id = get_env_var("CI_PROJECT_ID")
    ci_job_id = get_env_var("CI_JOB_ID")
    api_root_url = os.environ.get("CI_API_V4_URL")
    ci_project_id = os.environ.get("CI_PROJECT_ID")
    ci_job_id = os.environ.get("CI_JOB_ID")

    repro_job_url = "{0}/projects/{1}/jobs/{2}/artifacts".format(
        api_root_url, ci_project_id, ci_job_id
@@ -514,7 +514,15 @@ def add_concretizer_args(subparser):
        dest="concretizer:reuse",
        const=True,
        default=None,
        help="reuse installed dependencies/buildcaches when possible",
        help="reuse installed packages/buildcaches when possible",
    )
    subgroup.add_argument(
        "--reuse-deps",
        action=ConfigSetAction,
        dest="concretizer:reuse",
        const="dependencies",
        default=None,
        help="reuse installed dependencies only",
    )
@@ -39,19 +39,14 @@
compiler flags:
    @g{cflags="flags"}                 cppflags, cflags, cxxflags,
                                       fflags, ldflags, ldlibs
    @g{cflags=="flags"}                propagate flags to package dependencies
                                       cppflags, cflags, cxxflags, fflags,
                                       ldflags, ldlibs
    @g{==}                             propagate flags to package dependencies

variants:
    @B{+variant}                       enable <variant>
    @B{++variant}                      propagate enable <variant>
    @r{-variant} or @r{~variant}       disable <variant>
    @r{--variant} or @r{~~variant}     propagate disable <variant>
    @B{variant=value}                  set non-boolean <variant> to <value>
    @B{variant==value}                 propagate non-boolean <variant> to <value>
    @B{variant=value1,value2,value3}   set multi-value <variant> values
    @B{variant==value1,value2,value3}  propagate multi-value <variant> values
    @B{++}, @r{--}, @r{~~}, @B{==}     propagate variants to package dependencies

architecture variants:
    @m{platform=platform}              linux, darwin, cray, etc.
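For example, propagated variants and flags parse directly as spec syntax (a sketch; behavior per the propagation rules listed above):

import spack.spec

# ++shared propagates the enabled variant to hdf5's dependencies;
# cflags=="-O3" propagates the compiler flag the same way.
spec = spack.spec.Spec('hdf5 ++shared cflags=="-O3"')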
@@ -283,7 +283,7 @@ def print_tests(pkg):
    c_names = ("gcc", "intel", "intel-parallel-studio", "pgi")
    if pkg.name in c_names:
        v_names.extend(["c", "cxx", "fortran"])
    if pkg.spec.satisfies("llvm+clang"):
    if pkg.spec.intersects("llvm+clang"):
        v_names.extend(["c", "cxx"])
    # TODO Refactor END
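The satisfies/intersects split used throughout this series, in one sketch: intersects asks whether two constraints can hold simultaneously, while satisfies asks whether one constraint guarantees the other:

import spack.spec

a = spack.spec.Spec("llvm@14")
b = spack.spec.Spec("llvm+clang")

print(a.intersects(b))  # True: some llvm@14+clang can exist
print(a.satisfies(b))   # False: llvm@14 alone does not imply +clang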
@@ -263,146 +263,6 @@ def report_filename(args: argparse.Namespace, specs: List[spack.spec.Spec]) -> s
    return result


def install_specs(specs, install_kwargs, cli_args):
    try:
        if ev.active_environment():
            install_specs_inside_environment(specs, install_kwargs, cli_args)
        else:
            install_specs_outside_environment(specs, install_kwargs)
    except spack.build_environment.InstallError as e:
        if cli_args.show_log_on_error:
            e.print_context()
            assert e.pkg, "Expected InstallError to include the associated package"
            if not os.path.exists(e.pkg.build_log_path):
                tty.error("'spack install' created no log.")
            else:
                sys.stderr.write("Full build log:\n")
                with open(e.pkg.build_log_path) as log:
                    shutil.copyfileobj(log, sys.stderr)
        raise


def install_specs_inside_environment(specs, install_kwargs, cli_args):
    specs_to_install, specs_to_add = [], []
    env = ev.active_environment()
    for abstract, concrete in specs:
        # This won't find specs added to the env since last
        # concretize, therefore should we consider enforcing
        # concretization of the env before allowing to install
        # specs?
        m_spec = env.matching_spec(abstract)

        # If there is any ambiguity in the above call to matching_spec
        # (i.e. if more than one spec in the environment matches), then
        # SpackEnvironmentError is raised, with a message listing the
        # matches. Getting to this point means there were either
        # no matches or exactly one match.

        if not m_spec and not cli_args.add:
            msg = (
                "Cannot install '{0}' because it is not in the current environment."
                " You can add it to the environment with 'spack add {0}', or as part"
                " of the install command with 'spack install --add {0}'"
            ).format(str(abstract))
            tty.die(msg)

        if not m_spec:
            tty.debug("adding {0} as a root".format(abstract.name))
            specs_to_add.append((abstract, concrete))
            continue

        tty.debug("exactly one match for {0} in env -> {1}".format(m_spec.name, m_spec.dag_hash()))

        if m_spec in env.roots() or not cli_args.add:
            # either the single match is a root spec (in which case
            # the spec is not added to the env again), or the user did
            # not specify --add (in which case it is assumed we are
            # installing already-concretized specs in the env)
            tty.debug("just install {0}".format(m_spec.name))
            specs_to_install.append(m_spec)
        else:
            # the single match is not a root (i.e. it's a dependency),
            # and --add was specified, so we'll add it as a
            # root before installing
            tty.debug("add {0} then install it".format(m_spec.name))
            specs_to_add.append((abstract, concrete))
    if specs_to_add:
        tty.debug("Adding the following specs as roots:")
        for abstract, concrete in specs_to_add:
            tty.debug(" {0}".format(abstract.name))
            with env.write_transaction():
                specs_to_install.append(env.concretize_and_add(abstract, concrete))
                env.write(regenerate=False)
    # Install the validated list of cli specs
    if specs_to_install:
        tty.debug("Installing the following cli specs:")
        for s in specs_to_install:
            tty.debug(" {0}".format(s.name))
        env.install_specs(specs_to_install, **install_kwargs)


def install_specs_outside_environment(specs, install_kwargs):
    installs = [(concrete.package, install_kwargs) for _, concrete in specs]
    builder = PackageInstaller(installs)
    builder.install()


def install_all_specs_from_active_environment(
    install_kwargs, only_concrete, cli_test_arg, reporter_factory
):
    """Install all specs from the active environment

    Args:
        install_kwargs (dict): dictionary of options to be passed to the installer
        only_concrete (bool): if true don't concretize the environment, but install
            only the specs that are already concrete
        cli_test_arg (bool or str): command line argument to select which test to run
        reporter: reporter object for the installations
    """
    env = ev.active_environment()
    if not env:
        msg = "install requires a package argument or active environment"
        if "spack.yaml" in os.listdir(os.getcwd()):
            # There's a spack.yaml file in the working dir, the user may
            # have intended to use that
            msg += "\n\n"
            msg += "Did you mean to install using the `spack.yaml`"
            msg += " in this directory? Try: \n"
            msg += "    spack env activate .\n"
            msg += "    spack install\n"
            msg += "  OR\n"
            msg += "    spack --env . install"
        tty.die(msg)

    install_kwargs["tests"] = compute_tests_install_kwargs(env.user_specs, cli_test_arg)
    if not only_concrete:
        with env.write_transaction():
            concretized_specs = env.concretize(tests=install_kwargs["tests"])
            ev.display_specs(concretized_specs)

            # save view regeneration for later, so that we only do it
            # once, as it can be slow.
            env.write(regenerate=False)

    specs = env.all_specs()
    if not specs:
        msg = "{0} environment has no specs to install".format(env.name)
        tty.msg(msg)
        return

    reporter = reporter_factory(specs) or lang.nullcontext()

    tty.msg("Installing environment {0}".format(env.name))
    with reporter:
        env.install_all(**install_kwargs)

    tty.debug("Regenerating environment views for {0}".format(env.name))
    with env.write_transaction():
        # write env to trigger view generation and modulefile
        # generation
        env.write()


def compute_tests_install_kwargs(specs, cli_test_arg):
    """Translate the test cli argument into the proper install argument"""
    if cli_test_arg == "all":
@@ -412,43 +272,6 @@ def compute_tests_install_kwargs(specs, cli_test_arg):
    return False


def specs_from_cli(args, install_kwargs):
    """Return abstract and concrete spec parsed from the command line."""
    abstract_specs = spack.cmd.parse_specs(args.spec)
    install_kwargs["tests"] = compute_tests_install_kwargs(abstract_specs, args.test)
    try:
        concrete_specs = spack.cmd.parse_specs(
            args.spec, concretize=True, tests=install_kwargs["tests"]
        )
    except SpackError as e:
        tty.debug(e)
        if args.log_format is not None:
            reporter = args.reporter()
            reporter.concretization_report(report_filename(args, abstract_specs), e.message)
        raise
    return abstract_specs, concrete_specs


def concrete_specs_from_file(args):
    """Return the list of concrete specs read from files."""
    result = []
    for file in args.specfiles:
        with open(file, "r") as f:
            if file.endswith("yaml") or file.endswith("yml"):
                s = spack.spec.Spec.from_yaml(f)
            else:
                s = spack.spec.Spec.from_json(f)

        concretized = s.concretized()
        if concretized.dag_hash() != s.dag_hash():
            msg = 'skipped invalid file "{0}". '
            msg += "The file does not contain a concrete spec."
            tty.warn(msg.format(file))
            continue
        result.append(concretized)
    return result


def require_user_confirmation_for_overwrite(concrete_specs, args):
    if args.yes_to_all:
        return
@@ -475,12 +298,40 @@ def require_user_confirmation_for_overwrite(concrete_specs, args):
    tty.die("Reinstallation aborted.")


def _dump_log_on_error(e: spack.build_environment.InstallError):
    e.print_context()
    assert e.pkg, "Expected InstallError to include the associated package"
    if not os.path.exists(e.pkg.build_log_path):
        tty.error("'spack install' created no log.")
    else:
        sys.stderr.write("Full build log:\n")
        with open(e.pkg.build_log_path, errors="replace") as log:
            shutil.copyfileobj(log, sys.stderr)


def _die_require_env():
    msg = "install requires a package argument or active environment"
    if "spack.yaml" in os.listdir(os.getcwd()):
        # There's a spack.yaml file in the working dir, the user may
        # have intended to use that
        msg += (
            "\n\n"
            "Did you mean to install using the `spack.yaml`"
            " in this directory? Try: \n"
            "    spack env activate .\n"
            "    spack install\n"
            "  OR\n"
            "    spack --env . install"
        )
    tty.die(msg)


def install(parser, args):
    # TODO: unify args.verbose?
    tty.set_verbose(args.verbose or args.install_verbose)

    if args.help_cdash:
        spack.cmd.common.arguments.print_cdash_help()
        arguments.print_cdash_help()
        return

    if args.no_checksum:
@@ -489,43 +340,150 @@ def install(parser, args):
    if args.deprecated:
        spack.config.set("config:deprecated", True, scope="command_line")

    spack.cmd.common.arguments.sanitize_reporter_options(args)
    arguments.sanitize_reporter_options(args)

    def reporter_factory(specs):
        if args.log_format is None:
            return None
            return lang.nullcontext()

        context_manager = spack.report.build_context_manager(
        return spack.report.build_context_manager(
            reporter=args.reporter(), filename=report_filename(args, specs=specs), specs=specs
        )
        return context_manager

    install_kwargs = install_kwargs_from_args(args)

    if not args.spec and not args.specfiles:
        # If there are no args but an active environment then install the packages from it.
        install_all_specs_from_active_environment(
            install_kwargs=install_kwargs,
            only_concrete=args.only_concrete,
            cli_test_arg=args.test,
            reporter_factory=reporter_factory,
        )
    env = ev.active_environment()

    if not env and not args.spec and not args.specfiles:
        _die_require_env()

    try:
        if env:
            install_with_active_env(env, args, install_kwargs, reporter_factory)
        else:
            install_without_active_env(args, install_kwargs, reporter_factory)
    except spack.build_environment.InstallError as e:
        if args.show_log_on_error:
            _dump_log_on_error(e)
        raise


def _maybe_add_and_concretize(args, env, specs):
    """Handle the overloaded spack install behavior of adding
    and automatically concretizing specs"""

    # Users can opt out of accidental concretizations with --only-concrete
    if args.only_concrete:
        return

    # Specs from CLI
    abstract_specs, concrete_specs = specs_from_cli(args, install_kwargs)
    # Otherwise, we will modify the environment.
    with env.write_transaction():
        # `spack add` adds these specs.
        if args.add:
            for spec in specs:
                env.add(spec)

        # Concrete specs from YAML or JSON files
        specs_from_file = concrete_specs_from_file(args)
        abstract_specs.extend(specs_from_file)
        concrete_specs.extend(specs_from_file)
        # `spack concretize`
        tests = compute_tests_install_kwargs(env.user_specs, args.test)
        concretized_specs = env.concretize(tests=tests)
        ev.display_specs(concretized_specs)

        # save view regeneration for later, so that we only do it
        # once, as it can be slow.
        env.write(regenerate=False)


def install_with_active_env(env: ev.Environment, args, install_kwargs, reporter_factory):
    specs = spack.cmd.parse_specs(args.spec)

    # The following two commands are equivalent:
    # 1. `spack install --add x y z`
    # 2. `spack add x y z && spack concretize && spack install --only-concrete`
    # here we do the `add` and `concretize` part.
    _maybe_add_and_concretize(args, env, specs)

    # Now we're doing `spack install --only-concrete`.
    if args.add or not specs:
        specs_to_install = env.concrete_roots()
        if not specs_to_install:
            tty.msg(f"{env.name} environment has no specs to install")
            return

    # `spack install x y z` without --add is installing matching specs in the env.
    else:
        specs_to_install = env.all_matching_specs(*specs)
        if not specs_to_install:
            msg = (
                "Cannot install '{0}' because no matching specs are in the current environment."
                " You can add specs to the environment with 'spack add {0}', or as part"
                " of the install command with 'spack install --add {0}'"
            ).format(" ".join(args.spec))
            tty.die(msg)

    install_kwargs["tests"] = compute_tests_install_kwargs(specs_to_install, args.test)

    if args.overwrite:
        require_user_confirmation_for_overwrite(specs_to_install, args)
        install_kwargs["overwrite"] = [spec.dag_hash() for spec in specs_to_install]

    try:
        with reporter_factory(specs_to_install):
            env.install_specs(specs_to_install, **install_kwargs)
    finally:
        # TODO: this is doing way too much to trigger
        # views and modules to be generated.
        with env.write_transaction():
            env.write(regenerate=True)


def concrete_specs_from_cli(args, install_kwargs):
    """Return abstract and concrete spec parsed from the command line."""
    abstract_specs = spack.cmd.parse_specs(args.spec)
    install_kwargs["tests"] = compute_tests_install_kwargs(abstract_specs, args.test)
    try:
        concrete_specs = spack.cmd.parse_specs(
            args.spec, concretize=True, tests=install_kwargs["tests"]
        )
    except SpackError as e:
        tty.debug(e)
        if args.log_format is not None:
            reporter = args.reporter()
            reporter.concretization_report(report_filename(args, abstract_specs), e.message)
        raise
    return concrete_specs


def concrete_specs_from_file(args):
    """Return the list of concrete specs read from files."""
    result = []
    for file in args.specfiles:
        with open(file, "r") as f:
            if file.endswith("yaml") or file.endswith("yml"):
                s = spack.spec.Spec.from_yaml(f)
            else:
                s = spack.spec.Spec.from_json(f)

        concretized = s.concretized()
        if concretized.dag_hash() != s.dag_hash():
            msg = 'skipped invalid file "{0}". '
            msg += "The file does not contain a concrete spec."
            tty.warn(msg.format(file))
            continue
        result.append(concretized)
    return result


def install_without_active_env(args, install_kwargs, reporter_factory):
    concrete_specs = concrete_specs_from_cli(args, install_kwargs) + concrete_specs_from_file(args)

    if len(concrete_specs) == 0:
        tty.die("The `spack install` command requires a spec to install.")

    reporter = reporter_factory(concrete_specs) or lang.nullcontext()
    with reporter:
    with reporter_factory(concrete_specs):
        if args.overwrite:
            require_user_confirmation_for_overwrite(concrete_specs, args)
            install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs]
        install_specs(zip(abstract_specs, concrete_specs), install_kwargs, args)

        installs = [(s.package, install_kwargs) for s in concrete_specs]
        builder = PackageInstaller(installs)
        builder.install()
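The equivalence stated in install_with_active_env, spelled out with the environment API used above (a simplified sketch of `spack install --add x y z`):

env = ev.active_environment()
with env.write_transaction():
    for spec in spack.cmd.parse_specs("x y z"):
        env.add(spec)                      # `spack add x y z`
    env.concretize()                       # `spack concretize`
    env.write(regenerate=False)
env.install_specs(env.concrete_roots())    # `spack install --only-concrete`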
@@ -335,7 +335,7 @@ def not_excluded_fn(args):
    exclude_specs.extend(spack.cmd.parse_specs(str(args.exclude_specs).split()))

    def not_excluded(x):
        return not any(x.satisfies(y, strict=True) for y in exclude_specs)
        return not any(x.satisfies(y) for y in exclude_specs)

    return not_excluded
@@ -26,7 +26,6 @@
description = "run spack's unit tests (wrapper around pytest)"
section = "developer"
level = "long"
-is_windows = sys.platform == "win32"


def setup_parser(subparser):

@@ -212,7 +211,7 @@ def unit_test(parser, args, unknown_args):
    # mock configuration used by unit tests
    # Note: skip on windows here because for the moment,
    # clingo is wholly unsupported from bootstrap
-    if not is_windows:
+    if sys.platform != "win32":
        with spack.bootstrap.ensure_bootstrap_configuration():
            spack.bootstrap.ensure_core_dependencies()
    if pytest is None:
@@ -28,8 +28,6 @@

__all__ = ["Compiler"]

-is_windows = sys.platform == "win32"
-

@llnl.util.lang.memoized
def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):

@@ -598,7 +596,7 @@ def search_regexps(cls, language):
    suffixes = [""]
    # Windows compilers generally have an extension of some sort
    # as do most files on Windows, handle that case here
-    if is_windows:
+    if sys.platform == "win32":
        ext = r"\.(?:exe|bat)"
        cls_suf = [suf + ext for suf in cls.suffixes]
        ext_suf = [ext]
@@ -84,7 +84,7 @@ def _to_dict(compiler):
    d = {}
    d["spec"] = str(compiler.spec)
    d["paths"] = dict((attr, getattr(compiler, attr, None)) for attr in _path_instance_vars)
-    d["flags"] = dict((fname, fvals) for fname, fvals in compiler.flags)
+    d["flags"] = dict((fname, " ".join(fvals)) for fname, fvals in compiler.flags.items())
    d["flags"].update(
        dict(
            (attr, getattr(compiler, attr, None))
@@ -61,7 +61,7 @@ def is_clang_based(self):
        return version >= ver("9.0") and "classic" not in str(version)

    version_argument = "--version"
-    version_regex = r"[Vv]ersion.*?(\d+(\.\d+)+)"
+    version_regex = r"[Cc]ray (?:clang|C :|C\+\+ :|Fortran :) [Vv]ersion.*?(\d+(\.\d+)+)"

    @property
    def verbose_flag(self):
@@ -122,7 +122,19 @@ def platform_toolset_ver(self):
    @property
    def cl_version(self):
        """Cl toolset version"""
-        return spack.compiler.get_compiler_version_output(self.cc)
+        return Version(
+            re.search(
+                Msvc.version_regex,
+                spack.compiler.get_compiler_version_output(self.cc, version_arg=None),
+            ).group(1)
+        )

    @property
    def vs_root(self):
        # The MSVC install root is located at a fixed level above the compiler
        # and is referenceable idiomatically via the pattern below;
        # this should be consistent across versions
        return os.path.abspath(os.path.join(self.cc, "../../../../../../../.."))

    def setup_custom_environment(self, pkg, env):
        """Set environment variables for MSVC using the
@@ -134,7 +134,7 @@ def _valid_virtuals_and_externals(self, spec):

    externals = spec_externals(cspec)
    for ext in externals:
-        if ext.satisfies(spec):
+        if ext.intersects(spec):
            usable.append(ext)

    # If nothing is in the usable list now, it's because we aren't

@@ -200,7 +200,7 @@ def concretize_version(self, spec):

    # List of versions we could consider, in sorted order
    pkg_versions = spec.package_class.versions
-    usable = [v for v in pkg_versions if any(v.satisfies(sv) for sv in spec.versions)]
+    usable = [v for v in pkg_versions if any(v.intersects(sv) for sv in spec.versions)]

    yaml_prefs = PackagePrefs(spec.name, "version")

@@ -344,7 +344,7 @@ def concretize_architecture(self, spec):
    new_target_arch = spack.spec.ArchSpec((None, None, str(new_target)))
    curr_target_arch = spack.spec.ArchSpec((None, None, str(curr_target)))

-    if not new_target_arch.satisfies(curr_target_arch):
+    if not new_target_arch.intersects(curr_target_arch):
        # new_target is an incorrect guess based on preferences
        # and/or default
        valid_target_ranges = str(curr_target).split(",")
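The satisfies-to-intersects rewrites in these hunks split the old two-way satisfies() into two distinct queries. A minimal sketch of the distinction under the new semantics (the specs are illustrative, not taken from this diff):

    import spack.spec

    a = spack.spec.Spec("zlib@1.2:1.3")  # a version range
    b = spack.spec.Spec("zlib@1.2.13")   # a single version inside that range

    assert b.satisfies(a)       # b is at least as constrained as a
    assert not a.satisfies(b)   # a admits versions outside b
    assert a.intersects(b) and b.intersects(a)  # symmetric overlap test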
@@ -77,6 +77,8 @@
    "config": spack.schema.config.schema,
    "upstreams": spack.schema.upstreams.schema,
    "bootstrap": spack.schema.bootstrap.schema,
+    "ci": spack.schema.ci.schema,
+    "cdash": spack.schema.cdash.schema,
}

# Same as above, but including keys for environments

@@ -360,6 +362,12 @@ def _process_dict_keyname_overrides(data):
    if sk.endswith(":"):
        key = syaml.syaml_str(sk[:-1])
        key.override = True
+    elif sk.endswith("+"):
+        key = syaml.syaml_str(sk[:-1])
+        key.prepend = True
+    elif sk.endswith("-"):
+        key = syaml.syaml_str(sk[:-1])
+        key.append = True
    else:
        key = sk
@@ -1040,6 +1048,33 @@ def _override(string):
    return hasattr(string, "override") and string.override


+def _append(string):
+    """Test if a spack YAML string is an append.
+
+    See ``spack_yaml`` for details. Keys in Spack YAML can end in `-:`,
+    and if they do, their values append to lower-precedence
+    configs.
+
+    str, str : concatenate strings.
+    [obj], [obj] : append lists.
+
+    """
+    return getattr(string, "append", False)
+
+
+def _prepend(string):
+    """Test if a spack YAML string is a prepend.
+
+    See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
+    and if they do, their values prepend to lower-precedence
+    configs.
+
+    str, str : concatenate strings.
+    [obj], [obj] : prepend lists. (default behavior)
+    """
+    return getattr(string, "prepend", False)
+
+
def _mark_internal(data, name):
    """Add a simple name mark to raw YAML/JSON data.

@@ -1102,7 +1137,57 @@ def get_valid_type(path):
    raise ConfigError("Cannot determine valid type for path '%s'." % path)


-def merge_yaml(dest, source):
+def remove_yaml(dest, source):
+    """UnMerges source from dest; entries in source take precedence over dest.
+
+    This routine may modify dest and should be assigned to dest, in
+    case dest was None to begin with, e.g.:
+
+        dest = remove_yaml(dest, source)
+
+    In the result, elements from lists from ``source`` will not appear
+    as elements of lists from ``dest``. Likewise, when iterating over keys
+    or items in merged ``OrderedDict`` objects, keys from ``source`` will not
+    appear as keys in ``dest``.
+
+    Config file authors can optionally end any attribute in a dict
+    with `::` instead of `:`, and the key will remove the entire section
+    from ``dest``.
+    """
+
+    def they_are(t):
+        return isinstance(dest, t) and isinstance(source, t)
+
+    # If source is None, there is nothing to remove.
+    if source is None:
+        return dest
+
+    # Remove source elements from dest lists.
+    if they_are(list):
+        # Make sure to copy ruamel comments
+        dest[:] = [x for x in dest if x not in source]
+        return dest
+
+    # Recursively unmerge source dicts from dest.
+    elif they_are(dict):
+        for sk, sv in source.items():
+            # always remove the dest items. Python dicts do not overwrite
+            # keys on insert, so this ensures that source keys are copied
+            # into dest along with mark provenance (i.e., file/line info).
+            unmerge = sk in dest
+            old_dest_value = dest.pop(sk, None)
+
+            if unmerge and not spack.config._override(sk):
+                dest[sk] = remove_yaml(old_dest_value, sv)
+
+        return dest
+
+    # If we reach here source and dest are either different types or are
+    # not both lists or dicts: keep dest as it is.
+    return dest
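A minimal sketch of the unmerge behavior added above, using plain Python containers (real callers pass syaml-marked data loaded from config files):

    dest = {"packages": {"all": {"compiler": ["gcc", "clang"]}}}
    source = {"packages": {"all": {"compiler": ["clang"]}}}

    dest = remove_yaml(dest, source)
    # list elements present in source are filtered out of dest:
    # {"packages": {"all": {"compiler": ["gcc"]}}}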
+def merge_yaml(dest, source, prepend=False, append=False):
    """Merges source into dest; entries in source take precedence over dest.

    This routine may modify dest and should be assigned to dest, in

@@ -1118,6 +1203,9 @@ def merge_yaml(dest, source):
    Config file authors can optionally end any attribute in a dict
    with `::` instead of `:`, and the key will override that of the
    parent instead of merging.

+    `+:` will extend the default prepend merge strategy to include string concatenation.
+    `-:` will change the merge strategy to append; it also includes string concatenation.
    """

    def they_are(t):

@@ -1129,8 +1217,12 @@ def they_are(t):

    # Source list is prepended (for precedence)
    if they_are(list):
-        # Make sure to copy ruamel comments
-        dest[:] = source + [x for x in dest if x not in source]
+        if append:
+            # Make sure to copy ruamel comments
+            dest[:] = [x for x in dest if x not in source] + source
+        else:
+            # Make sure to copy ruamel comments
+            dest[:] = source + [x for x in dest if x not in source]
        return dest

    # Source dict is merged into dest.

@@ -1147,7 +1239,7 @@ def they_are(t):
        old_dest_value = dest.pop(sk, None)

        if merge and not _override(sk):
-            dest[sk] = merge_yaml(old_dest_value, sv)
+            dest[sk] = merge_yaml(old_dest_value, sv, _prepend(sk), _append(sk))
        else:
            # if sk ended with ::, or if it's new, completely override
            dest[sk] = copy.deepcopy(sv)

@@ -1158,6 +1250,13 @@ def they_are(t):

        return dest

+    elif they_are(str):
+        # Concatenate strings in prepend mode
+        if prepend:
+            return source + dest
+        elif append:
+            return dest + source
+
    # If we reach here source and dest are either different types or are
    # not both lists or dicts: replace with source.
    return copy.copy(source)
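A short sketch of the three list strategies (plain lists; in practice prepend/append are derived from the `+`/`-` key suffixes via _prepend() and _append()):

    dest = ["-O2"]
    dest = merge_yaml(dest, ["-g"])                  # default prepend -> ["-g", "-O2"]
    dest = merge_yaml(dest, ["-Wall"], append=True)  # append -> ["-g", "-O2", "-Wall"]
    assert merge_yaml("bar", "foo", prepend=True) == "foobar"  # strings concatenate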
@@ -1183,6 +1282,17 @@ def process_config_path(path):
        front = syaml.syaml_str(front)
        front.override = True
        seen_override_in_path = True

+    elif front.endswith("+"):
+        front = front.rstrip("+")
+        front = syaml.syaml_str(front)
+        front.prepend = True
+
+    elif front.endswith("-"):
+        front = front.rstrip("-")
+        front = syaml.syaml_str(front)
+        front.append = True

        result.append(front)
    return result
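The same suffix convention now works in colon-separated config paths; a hedged sketch (the section and key are illustrative):

    parts = spack.config.process_config_path("config:template_dirs+")
    # the trailing '+' is stripped and the component is marked for prepend-merge
    assert getattr(parts[-1], "prepend", False)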
@@ -1525,7 +1525,7 @@ def _query(
    if not (start_date < inst_date < end_date):
        continue

-    if query_spec is any or rec.spec.satisfies(query_spec, strict=True):
+    if query_spec is any or rec.spec.satisfies(query_spec):
        results.append(rec.spec)

    return results
@@ -29,7 +29,6 @@
import spack.util.spack_yaml
import spack.util.windows_registry

-is_windows = sys.platform == "win32"
#: Information on a package that has been detected
DetectedPackage = collections.namedtuple("DetectedPackage", ["spec", "prefix"])

@@ -184,7 +183,7 @@ def library_prefix(library_dir):
    elif "lib" in lowered_components:
        idx = lowered_components.index("lib")
        return os.sep.join(components[:idx])
-    elif is_windows and "bin" in lowered_components:
+    elif sys.platform == "win32" and "bin" in lowered_components:
        idx = lowered_components.index("bin")
        return os.sep.join(components[:idx])
    else:

@@ -260,13 +259,13 @@ def find_windows_compiler_bundled_packages():


class WindowsKitExternalPaths(object):
-    if is_windows:
+    if sys.platform == "win32":
        plat_major_ver = str(winOs.windows_version()[0])

    @staticmethod
    def find_windows_kit_roots():
        """Return Windows kit root, typically %programfiles%\\Windows Kits\\10|11\\"""
-        if not is_windows:
+        if sys.platform != "win32":
            return []
        program_files = os.environ["PROGRAMFILES(x86)"]
        kit_base = os.path.join(

@@ -359,7 +358,7 @@ def compute_windows_program_path_for_package(pkg):
    pkg (spack.package_base.PackageBase): package for which
        Program Files location is to be computed
    """
-    if not is_windows:
+    if sys.platform != "win32":
        return []
    # note windows paths are fine here as this method should only ever be invoked
    # to interact with Windows

@@ -379,7 +378,7 @@ def compute_windows_user_path_for_package(pkg):
    installs see:
    https://learn.microsoft.com/en-us/dotnet/api/system.environment.specialfolder?view=netframework-4.8
    """
-    if not is_windows:
+    if sys.platform != "win32":
        return []

    # Current user directory
@@ -31,8 +31,6 @@
    path_to_dict,
)

-is_windows = sys.platform == "win32"
-

def common_windows_package_paths():
    paths = WindowsCompilerExternalPaths.find_windows_compiler_bundled_packages()

@@ -57,7 +55,7 @@ def executables_in_path(path_hints):
    path_hints (list): list of paths to be searched. If None the list will be
        constructed based on the PATH environment variable.
    """
-    if is_windows:
+    if sys.platform == "win32":
        path_hints.extend(common_windows_package_paths())
    search_paths = llnl.util.filesystem.search_paths_for_executables(*path_hints)
    return path_to_dict(search_paths)

@@ -149,7 +147,7 @@ def by_library(packages_to_check, path_hints=None):

    path_to_lib_name = (
        libraries_in_ld_and_system_library_path(path_hints=path_hints)
-        if not is_windows
+        if sys.platform != "win32"
        else libraries_in_windows_paths(path_hints)
    )
@@ -21,7 +21,6 @@
import spack.util.spack_json as sjson
from spack.error import SpackError

-is_windows = sys.platform == "win32"
# Note: posixpath is used here as opposed to
# os.path.join due to spack.spec.Spec.format
# requiring forward slash path separators at this stage

@@ -346,7 +345,7 @@ def remove_install_directory(self, spec, deprecated=False):

    # Windows readonly files cannot be removed by Python
    # directly, change permissions before attempting to remove
-    if is_windows:
+    if sys.platform == "win32":
        kwargs = {
            "ignore_errors": False,
            "onerror": fs.readonly_file_handler(ignore_errors=False),
@@ -13,6 +13,7 @@
import time
import urllib.parse
import urllib.request
+from typing import List, Optional

import ruamel.yaml as yaml


@@ -59,7 +60,7 @@

#: currently activated environment
-_active_environment = None
+_active_environment: Optional["Environment"] = None


#: default path where environments are stored in the spack tree

@@ -349,7 +350,8 @@ def _is_dev_spec_and_has_changed(spec):

def _spec_needs_overwrite(spec, changed_dev_specs):
    """Check whether the current spec needs to be overwritten because either it has
-    changed itself or one of its dependencies have changed"""
+    changed itself or one of its dependencies has changed
+    """
    # if it's not installed, we don't need to overwrite it
    if not spec.installed:
        return False

@@ -1551,12 +1553,11 @@ def update_default_view(self, viewpath):

    def regenerate_views(self):
        if not self.views:
-            tty.debug("Skip view update, this environment does not" " maintain a view")
+            tty.debug("Skip view update, this environment does not maintain a view")
            return

-        concretized_root_specs = [s for _, s in self.concretized_specs()]
        for view in self.views.values():
-            view.regenerate(concretized_root_specs)
+            view.regenerate(self.concrete_roots())
    def check_views(self):
        """Checks if the environment's default view can be activated."""

@@ -1564,7 +1565,7 @@ def check_views(self):
        # This is effectively a no-op, but it touches all packages in the
        # default view if they are installed.
        for view_name, view in self.views.items():
-            for _, spec in self.concretized_specs():
+            for spec in self.concrete_roots():
                if spec in view and spec.package and spec.installed:
                    msg = '{0} in view "{1}"'
                    tty.debug(msg.format(spec.name, view_name))

@@ -1582,7 +1583,7 @@ def _env_modifications_for_default_view(self, reverse=False):
        visited = set()

        errors = []
-        for _, root_spec in self.concretized_specs():
+        for root_spec in self.concrete_roots():
            if root_spec in self.default_view and root_spec.installed and root_spec.package:
                for spec in root_spec.traverse(deptype="run", root=True):
                    if spec.name in visited:

@@ -1799,9 +1800,6 @@ def install_specs(self, specs=None, **install_args):
                "Could not install log links for {0}: {1}".format(spec.name, str(e))
            )

-        with self.write_transaction():
-            self.regenerate_views()
-
    def all_specs(self):
        """Return all specs, even those a user spec would shadow."""
        roots = [self.specs_by_hash[h] for h in self.concretized_order]
@@ -1846,6 +1844,11 @@ def concretized_specs(self):
        for s, h in zip(self.concretized_user_specs, self.concretized_order):
            yield (s, self.specs_by_hash[h])

+    def concrete_roots(self):
+        """Same as concretized_specs, except it returns the list of concrete
+        roots *without* the associated user spec"""
+        return [root for _, root in self.concretized_specs()]
+
    def get_by_hash(self, dag_hash):
        matches = {}
        roots = [self.specs_by_hash[h] for h in self.concretized_order]

@@ -1862,6 +1865,16 @@ def get_one_by_hash(self, dag_hash):
        assert len(hash_matches) == 1
        return hash_matches[0]

+    def all_matching_specs(self, *specs: spack.spec.Spec) -> List[Spec]:
+        """Returns all concretized specs in the environment satisfying any of the input specs"""
+        key = lambda s: s.dag_hash()
+        return [
+            s
+            for s in spack.traverse.traverse_nodes(self.concrete_roots(), key=key)
+            if any(s.satisfies(t) for t in specs)
+        ]
+
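A hedged usage sketch for the new helper (the spec strings are illustrative):

    env = spack.environment.active_environment()
    # every concrete spec in the environment's DAGs satisfying either query:
    for s in env.all_matching_specs(spack.spec.Spec("zlib"), spack.spec.Spec("cmake@3.24:")):
        print(s.dag_hash(7), s.name)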
    @spack.repo.autospec
    def matching_spec(self, spec):
        """
        Given a spec (likely not concretized), find a matching concretized
@@ -1879,59 +1892,60 @@ def matching_spec(self, spec):
        and multiple dependency specs match, then this raises an error
        and reports all matching specs.
        """
-        # Root specs will be keyed by concrete spec, value abstract
-        # Dependency-only specs will have value None
-        matches = {}
+        env_root_to_user = {root.dag_hash(): user for user, root in self.concretized_specs()}
+        root_matches, dep_matches = [], []

        if not isinstance(spec, spack.spec.Spec):
            spec = spack.spec.Spec(spec)

-        for user_spec, concretized_user_spec in self.concretized_specs():
-            # Deal with concrete specs differently
-            if spec.concrete:
-                if spec in concretized_user_spec:
-                    matches[spec] = spec
+        for env_spec in spack.traverse.traverse_nodes(
+            specs=[root for _, root in self.concretized_specs()],
+            key=lambda s: s.dag_hash(),
+            order="breadth",
+        ):
+            if not env_spec.satisfies(spec):
                continue

-            if concretized_user_spec.satisfies(spec):
-                matches[concretized_user_spec] = user_spec
-            for dep_spec in concretized_user_spec.traverse(root=False):
-                if dep_spec.satisfies(spec):
-                    # Don't overwrite the abstract spec if present
-                    # If not present already, set to None
-                    matches[dep_spec] = matches.get(dep_spec, None)
+            # If the spec is concrete, then there is no possibility of multiple matches,
+            # and we immediately return the single match
+            if spec.concrete:
+                return env_spec

-        if not matches:
+            # Distinguish between environment roots and deps. Specs that are both
+            # are classified as environment roots.
+            user_spec = env_root_to_user.get(env_spec.dag_hash())
+            if user_spec:
+                root_matches.append((env_spec, user_spec))
+            else:
+                dep_matches.append(env_spec)

+        # No matching spec
+        if not root_matches and not dep_matches:
            return None
-        elif len(matches) == 1:
-            return list(matches.keys())[0]

-        root_matches = dict(
-            (concrete, abstract) for concrete, abstract in matches.items() if abstract
-        )

        # Single root spec, any number of dep specs => return root spec.
        if len(root_matches) == 1:
-            return list(root_matches.items())[0][0]
+            return root_matches[0][0]

+        if not root_matches and len(dep_matches) == 1:
+            return dep_matches[0]

-        # More than one spec matched, and either multiple roots matched or
-        # none of the matches were roots
        # If multiple root specs match, it is assumed that the abstract
        # spec will most-succinctly summarize the difference between them
        # (and the user can enter one of these to disambiguate)
-        match_strings = []
        fmt_str = "{hash:7} " + spack.spec.default_format
-        for concrete, abstract in matches.items():
-            if abstract:
-                s = "Root spec %s\n  %s" % (abstract, concrete.format(fmt_str))
-            else:
-                s = "Dependency spec\n  %s" % concrete.format(fmt_str)
-            match_strings.append(s)
+        color = clr.get_color_when()
+        match_strings = [
+            f"Root spec {abstract.format(color=color)}\n  {concrete.format(fmt_str, color=color)}"
+            for concrete, abstract in root_matches
+        ]
+        match_strings.extend(
+            f"Dependency spec\n  {s.format(fmt_str, color=color)}" for s in dep_matches
+        )
        matches_str = "\n".join(match_strings)

-        msg = "{0} matches multiple specs in the environment {1}: \n" "{2}".format(
-            str(spec), self.name, matches_str
-        )
-        raise SpackEnvironmentError(msg)
+        raise SpackEnvironmentError(
+            f"{spec} matches multiple specs in the environment {self.name}: \n{matches_str}"
+        )
    def removed_specs(self):
        """Tuples of (user spec, concrete spec) for all specs that will be

@@ -2192,6 +2206,7 @@ def _update_and_write_manifest(self, raw_yaml_dict, yaml_dict):
            view = dict((name, view.to_dict()) for name, view in self.views.items())
        else:
            view = False
+
        yaml_dict["view"] = view

        if self.dev_specs:
|
||||
invalid_deps = [
|
||||
c
|
||||
for c in spec_constraints
|
||||
if any(c.satisfies(invd, strict=True) for invd in invalid_deps_string)
|
||||
if any(c.satisfies(invd) for invd in invalid_deps_string)
|
||||
]
|
||||
if len(invalid_deps) != len(invalid_deps_string):
|
||||
raise e
|
||||
|
@@ -28,7 +28,6 @@
import os.path
import re
import shutil
-import sys
import urllib.parse
from typing import List, Optional


@@ -53,7 +52,6 @@

#: List of all fetch strategies, created by FetchStrategy metaclass.
all_strategies = []
-is_windows = sys.platform == "win32"

CONTENT_TYPE_MISMATCH_WARNING_TEMPLATE = (
    "The contents of {subject} look like {content_type}. Either the URL"

@@ -1503,7 +1501,7 @@ def _from_merged_attrs(fetcher, pkg, version):
    return fetcher(**attrs)


-def for_package_version(pkg, version):
+def for_package_version(pkg, version=None):
    """Determine a fetch strategy based on the arguments supplied to
    version() in the package description."""

@@ -1514,8 +1512,18 @@ def for_package_version(pkg, version):

    check_pkg_attributes(pkg)

-    if not isinstance(version, spack.version.VersionBase):
-        version = spack.version.Version(version)
+    if version is not None:
+        assert not pkg.spec.concrete, "concrete specs should not pass the 'version=' argument"
+        # Specs are initialized with the universe range, if no version information is given,
+        # so here we make sure we always match the version passed as argument
+        if not isinstance(version, spack.version.VersionBase):
+            version = spack.version.Version(version)
+
+        version_list = spack.version.VersionList()
+        version_list.add(version)
+        pkg.spec.versions = version_list
+    else:
+        version = pkg.version

    # if it's a commit, we must use a GitFetchStrategy
    if isinstance(version, spack.version.GitVersion):
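How the new default parameter is meant to be used, as a sketch (`pkg` stands for any package instance):

    fetcher = fs.for_package_version(pkg)           # concrete spec: fetches pkg.version
    fetcher = fs.for_package_version(pkg, "1.2.3")  # abstract spec: pins it to 1.2.3 first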
@@ -30,8 +30,7 @@

#: Groupdb does not exist on Windows, prevent imports
#: on supported systems
-is_windows = sys.platform == "win32"
-if not is_windows:
+if sys.platform != "win32":
    import grp

#: Spack itself also limits the shebang line to at most 4KB, which should be plenty.
@@ -84,9 +84,6 @@
#: queue invariants).
STATUS_REMOVED = "removed"

-is_windows = sys.platform == "win32"
-is_osx = sys.platform == "darwin"
-

class InstallAction(object):
    #: Don't perform an install

@@ -169,9 +166,9 @@ def _do_fake_install(pkg):
    if not pkg.name.startswith("lib"):
        library = "lib" + library

-    plat_shared = ".dll" if is_windows else ".so"
-    plat_static = ".lib" if is_windows else ".a"
-    dso_suffix = ".dylib" if is_osx else plat_shared
+    plat_shared = ".dll" if sys.platform == "win32" else ".so"
+    plat_static = ".lib" if sys.platform == "win32" else ".a"
+    dso_suffix = ".dylib" if sys.platform == "darwin" else plat_shared

    # Install fake command
    fs.mkdirp(pkg.prefix.bin)
@@ -575,7 +575,7 @@ def setup_main_options(args):
    if args.debug:
        spack.util.debug.register_interrupt_handler()
        spack.config.set("config:debug", True, scope="command_line")
-        spack.util.environment.tracing_enabled = True
+        spack.util.environment.TRACING_ENABLED = True

    if args.timestamp:
        tty.set_timestamp(True)
@@ -492,7 +492,7 @@ def get_matching_versions(specs, num_versions=1):
            break

        # Generate only versions that satisfy the spec.
-        if spec.concrete or v.satisfies(spec.versions):
+        if spec.concrete or v.intersects(spec.versions):
            s = spack.spec.Spec(pkg.name)
            s.versions = VersionList([v])
            s.variants = spec.variants.copy()
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""This package contains code for creating environment modules, which can
-include TCL non-hierarchical modules, LUA hierarchical modules, and others.
+include Tcl non-hierarchical modules, Lua hierarchical modules, and others.
"""

from __future__ import absolute_import
@@ -207,7 +207,7 @@ def merge_config_rules(configuration, spec):
    # evaluated in order of appearance in the module file
    spec_configuration = module_specific_configuration.pop("all", {})
    for constraint, action in module_specific_configuration.items():
-        if spec.satisfies(constraint, strict=True):
+        if spec.satisfies(constraint):
            if hasattr(constraint, "override") and constraint.override:
                spec_configuration = {}
            update_dictionary_extending_lists(spec_configuration, action)
@@ -71,7 +71,7 @@ def guess_core_compilers(name, store=False):
    # A compiler is considered to be a core compiler if any of the
    # C, C++ or Fortran compilers reside in a system directory
    is_system_compiler = any(
-        os.path.dirname(x) in spack.util.environment.system_dirs
+        os.path.dirname(x) in spack.util.environment.SYSTEM_DIRS
        for x in compiler["paths"].values()
        if x is not None
    )
@@ -3,7 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

-"""This module implements the classes necessary to generate TCL
+"""This module implements the classes necessary to generate Tcl
non-hierarchical modules.
"""
import posixpath

@@ -19,7 +19,7 @@
from .common import BaseConfiguration, BaseContext, BaseFileLayout, BaseModuleFileWriter


-#: TCL specific part of the configuration
+#: Tcl specific part of the configuration
def configuration(module_set_name):
    config_path = "modules:%s:tcl" % module_set_name
    config = spack.config.get(config_path, {})
@@ -36,7 +36,7 @@
    cmake_cache_path,
    cmake_cache_string,
)
-from spack.build_systems.cmake import CMakePackage
+from spack.build_systems.cmake import CMakePackage, generator
from spack.build_systems.cuda import CudaPackage
from spack.build_systems.generic import Package
from spack.build_systems.gnu import GNUMirrorPackage
@@ -92,9 +92,6 @@
_spack_configure_argsfile = "spack-configure-args.txt"


-is_windows = sys.platform == "win32"
-

def deprecated_version(pkg, version):
    """Return True if the version is deprecated, False otherwise.


@@ -165,7 +162,7 @@ def windows_establish_runtime_linkage(self):

    Performs symlinking to incorporate rpath dependencies to Windows runtime search paths
    """
-    if is_windows:
+    if sys.platform == "win32":
        self.win_rpath.add_library_dependent(*self.win_add_library_dependent())
        self.win_rpath.add_rpath(*self.win_add_rpath())
        self.win_rpath.establish_link()

@@ -210,7 +207,7 @@ def to_windows_exe(exe):
    plat_exe = []
    if hasattr(cls, "executables"):
        for exe in cls.executables:
-            if is_windows:
+            if sys.platform == "win32":
                exe = to_windows_exe(exe)
            plat_exe.append(exe)
    return plat_exe

@@ -1200,7 +1197,7 @@ def _make_fetcher(self):
    # one element (the root package). In case there are resources
    # associated with the package, append their fetcher to the
    # composite.
-    root_fetcher = fs.for_package_version(self, self.version)
+    root_fetcher = fs.for_package_version(self)
    fetcher = fs.FetchStrategyComposite()  # Composite fetcher
    fetcher.append(root_fetcher)  # Root fetcher is always present
    resources = self._get_needed_resources()

@@ -1311,7 +1308,7 @@ def provides(self, vpkg_name):
        True if this package provides a virtual package with the specified name
    """
    return any(
-        any(self.spec.satisfies(c) for c in constraints)
+        any(self.spec.intersects(c) for c in constraints)
        for s, constraints in self.provided.items()
        if s.name == vpkg_name
    )

@@ -1617,7 +1614,7 @@ def content_hash(self, content=None):
    # TODO: resources
    if self.spec.versions.concrete:
        try:
-            source_id = fs.for_package_version(self, self.version).source_id()
+            source_id = fs.for_package_version(self).source_id()
        except (fs.ExtrapolationError, fs.InvalidArgsError):
            # ExtrapolationError happens if the package has no fetchers defined.
            # InvalidArgsError happens when there are version directives with args,

@@ -1780,7 +1777,7 @@ def _get_needed_resources(self):
    # conflict with the spec, so we need to invoke
    # when_spec.satisfies(self.spec) vs.
    # self.spec.satisfies(when_spec)
-    if when_spec.satisfies(self.spec, strict=False):
+    if when_spec.intersects(self.spec):
        resources.extend(resource_list)
    # Sorts the resources by the length of the string representing their
    # destination. Since any nested resource must contain another

@@ -2401,7 +2398,7 @@ def rpath(self):

    # on Windows, libraries of runtime interest are typically
    # stored in the bin directory
-    if is_windows:
+    if sys.platform == "win32":
        rpaths = [self.prefix.bin]
        rpaths.extend(d.prefix.bin for d in deps if os.path.isdir(d.prefix.bin))
    else:
@@ -73,7 +73,7 @@ def __call__(self, spec):
    # integer is the index of the first spec in order that satisfies
    # spec, or it's a number larger than any position in the order.
    match_index = next(
-        (i for i, s in enumerate(spec_order) if spec.satisfies(s)), len(spec_order)
+        (i for i, s in enumerate(spec_order) if spec.intersects(s)), len(spec_order)
    )
    if match_index < len(spec_order) and spec_order[match_index] == spec:
        # If this is called with multiple specs that all satisfy the same

@@ -185,7 +185,7 @@ def _package(maybe_abstract_spec):
        ),
        extra_attributes=entry.get("extra_attributes", {}),
    )
-    if external_spec.satisfies(spec):
+    if external_spec.intersects(spec):
        external_specs.append(external_spec)

# Defensively copy returned specs
@@ -37,7 +37,7 @@


def slingshot_network():
-    return os.path.exists("/lib64/libcxi.so")
+    return os.path.exists("/opt/cray/pe") and os.path.exists("/lib64/libcxi.so")


def _target_name_from_craype_target_name(name):
@@ -10,7 +10,7 @@ def get_projection(projections, spec):
    """
    all_projection = None
    for spec_like, projection in projections.items():
-        if spec.satisfies(spec_like, strict=True):
+        if spec.satisfies(spec_like):
            return projection
        elif spec_like == "all":
            all_projection = projection
@@ -72,7 +72,7 @@ def providers_for(self, virtual_spec):
    # Add all the providers that satisfy the vpkg spec.
    if virtual_spec.name in self.providers:
        for p_spec, spec_set in self.providers[virtual_spec.name].items():
-            if p_spec.satisfies(virtual_spec, deps=False):
+            if p_spec.intersects(virtual_spec, deps=False):
                result.update(spec_set)

    # Return providers in order. Defensively copy.

@@ -186,7 +186,7 @@ def update(self, spec):
    provider_spec = provider_spec_readonly.copy()
    provider_spec.compiler_flags = spec.compiler_flags.copy()

-    if spec.satisfies(provider_spec, deps=False):
+    if spec.intersects(provider_spec, deps=False):
        provided_name = provided_spec.name

        provider_map = self.providers.setdefault(provided_name, {})
@@ -15,7 +15,8 @@
    "cdash": {
        "type": "object",
        "additionalProperties": False,
-        "required": ["build-group", "url", "project", "site"],
+        # "required": ["build-group", "url", "project", "site"],
+        "required": ["build-group"],
        "patternProperties": {
            r"build-group": {"type": "string"},
            r"url": {"type": "string"},
lib/spack/spack/schema/ci.py (new file, 181 lines)
@@ -0,0 +1,181 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Schema for gitlab-ci.yaml configuration file.

.. literalinclude:: ../spack/schema/ci.py
   :lines: 13-
"""

from llnl.util.lang import union_dicts

# Schema for script fields
# List of lists and/or strings
# This is similar to what is allowed in
# the gitlab schema
script_schema = {
    "type": "array",
    "items": {"anyOf": [{"type": "string"}, {"type": "array", "items": {"type": "string"}}]},
}

# Additional attributes are allowed
# and will be forwarded directly to the
# CI target YAML for each job.
attributes_schema = {
    "type": "object",
    "properties": {
        "image": {
            "oneOf": [
                {"type": "string"},
                {
                    "type": "object",
                    "properties": {
                        "name": {"type": "string"},
                        "entrypoint": {"type": "array", "items": {"type": "string"}},
                    },
                },
            ]
        },
        "tags": {"type": "array", "items": {"type": "string"}},
        "variables": {
            "type": "object",
            "patternProperties": {r"[\w\d\-_\.]+": {"type": "string"}},
        },
        "before_script": script_schema,
        "script": script_schema,
        "after_script": script_schema,
    },
}

submapping_schema = {
    "type": "object",
    "additionalProperties": False,
    "required": ["submapping"],
    "properties": {
        "match_behavior": {"type": "string", "enum": ["first", "merge"], "default": "first"},
        "submapping": {
            "type": "array",
            "items": {
                "type": "object",
                "additionalProperties": False,
                "required": ["match"],
                "properties": {
                    "match": {"type": "array", "items": {"type": "string"}},
                    "build-job": attributes_schema,
                    "build-job-remove": attributes_schema,
                },
            },
        },
    },
}

named_attributes_schema = {
    "oneOf": [
        {
            "type": "object",
            "additionalProperties": False,
            "properties": {"noop-job": attributes_schema, "noop-job-remove": attributes_schema},
        },
        {
            "type": "object",
            "additionalProperties": False,
            "properties": {"build-job": attributes_schema, "build-job-remove": attributes_schema},
        },
        {
            "type": "object",
            "additionalProperties": False,
            "properties": {
                "reindex-job": attributes_schema,
                "reindex-job-remove": attributes_schema,
            },
        },
        {
            "type": "object",
            "additionalProperties": False,
            "properties": {
                "signing-job": attributes_schema,
                "signing-job-remove": attributes_schema,
            },
        },
        {
            "type": "object",
            "additionalProperties": False,
            "properties": {
                "cleanup-job": attributes_schema,
                "cleanup-job-remove": attributes_schema,
            },
        },
        {
            "type": "object",
            "additionalProperties": False,
            "properties": {"any-job": attributes_schema, "any-job-remove": attributes_schema},
        },
    ]
}

pipeline_gen_schema = {
    "type": "array",
    "items": {"oneOf": [submapping_schema, named_attributes_schema]},
}

core_shared_properties = union_dicts(
    {
        "pipeline-gen": pipeline_gen_schema,
        "bootstrap": {
            "type": "array",
            "items": {
                "anyOf": [
                    {"type": "string"},
                    {
                        "type": "object",
                        "additionalProperties": False,
                        "required": ["name"],
                        "properties": {
                            "name": {"type": "string"},
                            "compiler-agnostic": {"type": "boolean", "default": False},
                        },
                    },
                ]
            },
        },
        "rebuild-index": {"type": "boolean"},
        "broken-specs-url": {"type": "string"},
        "broken-tests-packages": {"type": "array", "items": {"type": "string"}},
        "target": {"type": "string", "enum": ["gitlab"], "default": "gitlab"},
    }
)

ci_properties = {
    "anyOf": [
        {
            "type": "object",
            "additionalProperties": False,
            # "required": ["mappings"],
            "properties": union_dicts(
                core_shared_properties, {"enable-artifacts-buildcache": {"type": "boolean"}}
            ),
        },
        {
            "type": "object",
            "additionalProperties": False,
            # "required": ["mappings"],
            "properties": union_dicts(
                core_shared_properties, {"temporary-storage-url-prefix": {"type": "string"}}
            ),
        },
    ]
}

#: Properties for inclusion in other schemas
properties = {"ci": ci_properties}

#: Full schema with metadata
schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "title": "Spack CI configuration file schema",
    "type": "object",
    "additionalProperties": False,
    "properties": properties,
}
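A minimal configuration that should validate against this schema (values are illustrative; validation uses the external jsonschema package):

    import jsonschema

    from spack.schema.ci import schema

    example = {
        "ci": {
            "target": "gitlab",
            "rebuild-index": True,
            "pipeline-gen": [
                {
                    "submapping": [
                        {
                            "match": ["os=ubuntu20.04"],
                            "build-job": {"tags": ["spack-kube"], "image": "spack/ubuntu-focal"},
                        }
                    ]
                },
                {"any-job": {"before_script": [". share/spack/setup-env.sh"]}},
            ],
        }
    }

    jsonschema.validate(example, schema)  # raises ValidationError on mismatch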
@@ -14,7 +14,9 @@
        "type": "object",
        "additionalProperties": False,
        "properties": {
-            "reuse": {"type": "boolean"},
+            "reuse": {
+                "oneOf": [{"type": "boolean"}, {"type": "string", "enum": ["dependencies"]}]
+            },
            "enable_node_namespace": {"type": "boolean"},
            "targets": {
                "type": "object",
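With the widened type, both settings below are accepted; the string form is consumed by _reusable_specs() further down in this diff (hedged sketch):

    import spack.config

    spack.config.set("concretizer:reuse", True)            # original boolean form
    spack.config.set("concretizer:reuse", "dependencies")  # new: reuse everything but roots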
@@ -12,11 +12,11 @@

import spack.schema.bootstrap
import spack.schema.cdash
+import spack.schema.ci
import spack.schema.compilers
import spack.schema.concretizer
import spack.schema.config
import spack.schema.container
-import spack.schema.gitlab_ci
import spack.schema.mirrors
import spack.schema.modules
import spack.schema.packages

@@ -31,7 +31,7 @@
    spack.schema.concretizer.properties,
    spack.schema.config.properties,
    spack.schema.container.properties,
-    spack.schema.gitlab_ci.properties,
+    spack.schema.ci.properties,
    spack.schema.mirrors.properties,
    spack.schema.modules.properties,
    spack.schema.packages.properties,
@@ -32,11 +32,16 @@
    {
        "type": "array",
        "items": {
-            "type": "object",
-            "properties": {
-                "one_of": {"type": "array"},
-                "any_of": {"type": "array"},
-            },
+            "oneOf": [
+                {
+                    "type": "object",
+                    "properties": {
+                        "one_of": {"type": "array"},
+                        "any_of": {"type": "array"},
+                    },
+                },
+                {"type": "string"},
+            ]
        },
    },
    # Shorthand for a single requirement group with
@@ -818,6 +818,9 @@ def __init__(self, tests=False):
        self.compiler_version_constraints = set()
        self.post_facts = []

+        # (ID, CompilerSpec) -> dictionary of attributes
+        self.compiler_info = collections.defaultdict(dict)
+
        # hashes we've already added facts for
        self.seen_hashes = set()
        self.reusable_and_possible = {}
@@ -942,54 +945,38 @@ def conflict_rules(self, pkg):
        self.gen.fact(fn.conflict(pkg.name, trigger_id, constraint_id, conflict_msg))
        self.gen.newline()

-    def available_compilers(self):
+    def compiler_facts(self):
        """Facts about available compilers."""

        self.gen.h2("Available compilers")
-        compilers = self.possible_compilers
+        indexed_possible_compilers = list(enumerate(self.possible_compilers))
+        for compiler_id, compiler in indexed_possible_compilers:
+            self.gen.fact(fn.compiler_id(compiler_id))
+            self.gen.fact(fn.compiler_name(compiler_id, compiler.spec.name))
+            self.gen.fact(fn.compiler_version(compiler_id, compiler.spec.version))

-        compiler_versions = collections.defaultdict(lambda: set())
-        for compiler in compilers:
-            compiler_versions[compiler.name].add(compiler.version)
+            if compiler.operating_system:
+                self.gen.fact(fn.compiler_os(compiler_id, compiler.operating_system))

-        for compiler in sorted(compiler_versions):
-            for v in sorted(compiler_versions[compiler]):
-                self.gen.fact(fn.compiler_version(compiler, v))
+            if compiler.target is not None:
+                self.gen.fact(fn.compiler_target(compiler_id, compiler.target))

+            for flag_type, flags in compiler.flags.items():
+                for flag in flags:
+                    self.gen.fact(fn.compiler_flag(compiler_id, flag_type, flag))

            self.gen.newline()

-    def compiler_defaults(self):
-        """Set compiler defaults, given a list of possible compilers."""
-        self.gen.h2("Default compiler preferences")
+        # Set compiler defaults, given a list of possible compilers
+        self.gen.h2("Default compiler preferences (CompilerID, Weight)")

-        compiler_list = self.possible_compilers.copy()
-        compiler_list = sorted(compiler_list, key=lambda x: (x.name, x.version), reverse=True)
        ppk = spack.package_prefs.PackagePrefs("all", "compiler", all=False)
-        matches = sorted(compiler_list, key=ppk)
+        matches = sorted(indexed_possible_compilers, key=lambda x: ppk(x[1].spec))

-        for i, cspec in enumerate(matches):
-            f = fn.default_compiler_preference(cspec.name, cspec.version, i)
+        for weight, (compiler_id, cspec) in enumerate(matches):
+            f = fn.default_compiler_preference(compiler_id, weight)
            self.gen.fact(f)

-        # Enumerate target families. This may be redundant, but compilers with
-        # custom versions will be able to concretize properly.
-        for entry in spack.compilers.all_compilers_config():
-            compiler_entry = entry["compiler"]
-            cspec = spack.spec.CompilerSpec(compiler_entry["spec"])
-            if not compiler_entry.get("target", None):
-                continue
-
-            self.gen.fact(
-                fn.compiler_supports_target(cspec.name, cspec.version, compiler_entry["target"])
-            )
-
-    def compiler_supports_os(self):
-        compilers_yaml = spack.compilers.all_compilers_config()
-        for entry in compilers_yaml:
-            c = spack.spec.CompilerSpec(entry["compiler"]["spec"])
-            operating_system = entry["compiler"]["operating_system"]
-            self.gen.fact(fn.compiler_supports_os(c.name, c.version, operating_system))
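Illustrative facts emitted by the consolidated compiler_facts() for one configured compiler (values are hypothetical; one fact per fn.* call above, in ASP syntax):

    compiler_id(0).
    compiler_name(0, "gcc").
    compiler_version(0, "12.2.0").
    compiler_os(0, "ubuntu20.04").
    compiler_flag(0, "cflags", "-O2").
    default_compiler_preference(0, 0).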

    def package_compiler_defaults(self, pkg):
        """Facts about packages' compiler prefs."""

@@ -998,14 +985,16 @@ def package_compiler_defaults(self, pkg):
        if not pkg_prefs or "compiler" not in pkg_prefs:
            return

-        compiler_list = self.possible_compilers.copy()
+        compiler_list = self.possible_compilers
        compiler_list = sorted(compiler_list, key=lambda x: (x.name, x.version), reverse=True)
        ppk = spack.package_prefs.PackagePrefs(pkg.name, "compiler", all=False)
-        matches = sorted(compiler_list, key=ppk)
+        matches = sorted(compiler_list, key=lambda x: ppk(x.spec))

-        for i, cspec in enumerate(reversed(matches)):
+        for i, compiler in enumerate(reversed(matches)):
            self.gen.fact(
-                fn.node_compiler_preference(pkg.name, cspec.name, cspec.version, -i * 100)
+                fn.node_compiler_preference(
+                    pkg.name, compiler.spec.name, compiler.spec.version, -i * 100
+                )
            )

    def package_requirement_rules(self, pkg):

@@ -1028,9 +1017,14 @@ def _rules_from_requirements(self, pkg_name, requirements):
        else:
            rules = []
            for requirement in requirements:
-                for policy in ("one_of", "any_of"):
-                    if policy in requirement:
-                        rules.append((pkg_name, policy, requirement[policy]))
+                if isinstance(requirement, str):
+                    # A string represents a spec that must be satisfied. It is
+                    # equivalent to a one_of group with a single element
+                    rules.append((pkg_name, "one_of", [requirement]))
+                else:
+                    for policy in ("one_of", "any_of"):
+                        if policy in requirement:
+                            rules.append((pkg_name, policy, requirement[policy]))
        return rules
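With the string shorthand accepted above, both requirement forms below yield the same rule triple (hedged sketch; `solver_setup` stands for a SpackSolverSetup instance):

    rules = solver_setup._rules_from_requirements("mpich", ["@4.1"])
    assert rules == [("mpich", "one_of", ["@4.1"])]

    rules = solver_setup._rules_from_requirements("mpich", [{"one_of": ["@4.1"]}])
    assert rules == [("mpich", "one_of", ["@4.1"])]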
    def pkg_rules(self, pkg, tests):

@@ -1392,23 +1386,6 @@ def target_preferences(self, pkg_name):
                fn.target_weight(pkg_name, str(preferred.architecture.target), i + offset)
            )

-    def flag_defaults(self):
-        self.gen.h2("Compiler flag defaults")
-
-        # types of flags that can be on specs
-        for flag in spack.spec.FlagMap.valid_compiler_flags():
-            self.gen.fact(fn.flag_type(flag))
-        self.gen.newline()
-
-        # flags from compilers.yaml
-        compilers = all_compilers_in_config()
-        for compiler in compilers:
-            for name, flags in compiler.flags.items():
-                for flag in flags:
-                    self.gen.fact(
-                        fn.compiler_version_flag(compiler.name, compiler.version, name, flag)
-                    )
-
    def spec_clauses(self, *args, **kwargs):
        """Wrap a call to `_spec_clauses()` into a try/except block that
        raises a comprehensible error message in case of failure.
@@ -1458,6 +1435,7 @@ class Head(object):
    node_compiler = fn.attr("node_compiler_set")
    node_compiler_version = fn.attr("node_compiler_version_set")
    node_flag = fn.attr("node_flag_set")
+    node_flag_source = fn.attr("node_flag_source")
    node_flag_propagate = fn.attr("node_flag_propagate")
    variant_propagate = fn.attr("variant_propagate")

@@ -1471,6 +1449,7 @@ class Body(object):
    node_compiler = fn.attr("node_compiler")
    node_compiler_version = fn.attr("node_compiler_version")
    node_flag = fn.attr("node_flag")
+    node_flag_source = fn.attr("node_flag_source")
    node_flag_propagate = fn.attr("node_flag_propagate")
    variant_propagate = fn.attr("variant_propagate")

@@ -1552,6 +1531,7 @@ class Body(object):
        for flag_type, flags in spec.compiler_flags.items():
            for flag in flags:
                clauses.append(f.node_flag(spec.name, flag_type, flag))
+                clauses.append(f.node_flag_source(spec.name, flag_type, spec.name))
                if not spec.concrete and flag.propagate is True:
                    clauses.append(f.node_flag_propagate(spec.name, flag_type))
@@ -1762,8 +1742,6 @@ def target_defaults(self, specs):
        if granularity == "generic":
            candidate_targets = [t for t in candidate_targets if t.vendor == "generic"]

-        compilers = self.possible_compilers
-
        # Add targets explicitly requested from specs
        for spec in specs:
            if not spec.architecture or not spec.architecture.target:

@@ -1780,8 +1758,14 @@ def target_defaults(self, specs):
            if ancestor not in candidate_targets:
                candidate_targets.append(ancestor)

-        best_targets = set([uarch.family.name])
-        for compiler in sorted(compilers):
+        best_targets = {uarch.family.name}
+        for compiler_id, compiler in enumerate(self.possible_compilers):
+            # Stub support for cross-compilation, to be expanded later
+            if compiler.target is not None and compiler.target != str(uarch.family):
+                self.gen.fact(fn.compiler_supports_target(compiler_id, compiler.target))
+                self.gen.newline()
+                continue

            supported = self._supported_targets(compiler.name, compiler.version, candidate_targets)

            # If we can't find supported targets it may be due to custom

@@ -1789,10 +1773,8 @@ def target_defaults(self, specs):
            # real_version from the compiler object to get more accurate
            # results.
            if not supported:
-                compiler_obj = spack.compilers.compilers_for_spec(compiler)
-                compiler_obj = compiler_obj[0]
                supported = self._supported_targets(
-                    compiler.name, compiler_obj.real_version, candidate_targets
+                    compiler.name, compiler.real_version, candidate_targets
                )

            if not supported:

@@ -1800,20 +1782,19 @@ def target_defaults(self, specs):

            for target in supported:
                best_targets.add(target.name)
-                self.gen.fact(
-                    fn.compiler_supports_target(compiler.name, compiler.version, target.name)
-                )
+                self.gen.fact(fn.compiler_supports_target(compiler_id, target.name))

-            self.gen.fact(
-                fn.compiler_supports_target(compiler.name, compiler.version, uarch.family.name)
-            )
+            self.gen.fact(fn.compiler_supports_target(compiler_id, uarch.family.name))
            self.gen.newline()

        i = 0  # TODO compute per-target offset?
        for target in candidate_targets:
            self.gen.fact(fn.target(target.name))
            self.gen.fact(fn.target_family(target.name, target.family.name))
            for parent in sorted(target.parents):
                self.gen.fact(fn.target_parent(target.name, parent.name))
+            self.gen.fact(fn.target_compatible(target.name, target.name))
+            # Code for ancestor can run on target
+            for ancestor in target.ancestors:
+                self.gen.fact(fn.target_compatible(target.name, ancestor.name))

            # prefer best possible targets; weight others poorly so
            # they're not used unless set explicitly

@@ -1824,10 +1805,10 @@ def target_defaults(self, specs):
                i += 1
            else:
                self.default_targets.append((100, target.name))

-        self.default_targets = list(sorted(set(self.default_targets)))
        self.gen.newline()

+        self.default_targets = list(sorted(set(self.default_targets)))

    def virtual_providers(self):
        self.gen.h2("Virtual providers")
        msg = (
@@ -1843,6 +1824,22 @@ def virtual_providers(self):

    def generate_possible_compilers(self, specs):
        compilers = all_compilers_in_config()
+
+        # Search for compilers which differ only by aspects that are
+        # not selectable by users using the spec syntax
+        seen, sanitized_list = set(), []
+        for compiler in compilers:
+            key = compiler.spec, compiler.operating_system, compiler.target
+            if key in seen:
+                warnings.warn(
+                    f"duplicate found for {compiler.spec} on "
+                    f"{compiler.operating_system}/{compiler.target}. "
+                    f"Edit your compilers.yaml configuration to remove it."
+                )
+                continue
+            sanitized_list.append(compiler)
+            seen.add(key)
+
        cspecs = set([c.spec for c in compilers])

        # add compiler specs from the input line to possibilities if we

@@ -1863,15 +1860,27 @@ def generate_possible_compilers(self, specs):
            # Allow unknown compilers to exist if the associated spec
            # is already built
            else:
-                cspecs.add(s.compiler)
+                compiler_cls = spack.compilers.class_for_compiler_name(s.compiler.name)
+                compilers.append(
+                    compiler_cls(
+                        s.compiler, operating_system=None, target=None, paths=[None] * 4
+                    )
+                )
                self.gen.fact(fn.allow_compiler(s.compiler.name, s.compiler.version))

-        return cspecs
+        return list(
+            sorted(
+                compilers,
+                key=lambda compiler: (compiler.spec.name, compiler.spec.version),
+                reverse=True,
+            )
+        )
|
||||
"""Define what version_satisfies(...) means in ASP logic."""
|
||||
for pkg_name, versions in sorted(self.version_constraints):
|
||||
# version must be *one* of the ones the spec allows.
|
||||
# Also, "possible versions" contain only concrete versions, so satisfies is appropriate
|
||||
allowed_versions = [
|
||||
v for v in sorted(self.possible_versions[pkg_name]) if v.satisfies(versions)
|
||||
]
|
||||
@@ -1923,14 +1932,12 @@ def versions_for(v):
|
||||
self.possible_versions[pkg_name].add(version)
|
||||
|
||||
def define_compiler_version_constraints(self):
|
||||
compiler_list = spack.compilers.all_compiler_specs()
|
||||
compiler_list = list(sorted(set(compiler_list)))
|
||||
for constraint in sorted(self.compiler_version_constraints):
|
||||
for compiler in compiler_list:
|
||||
if compiler.satisfies(constraint):
|
||||
for compiler_id, compiler in enumerate(self.possible_compilers):
|
||||
if compiler.spec.satisfies(constraint):
|
||||
self.gen.fact(
|
||||
fn.compiler_version_satisfies(
|
||||
constraint.name, constraint.versions, compiler.version
|
||||
constraint.name, constraint.versions, compiler_id
|
||||
)
|
||||
)
|
||||
self.gen.newline()
|
||||
@@ -2091,10 +2098,13 @@ def setup(self, driver, specs, reuse=None):
        for reusable_spec in reuse:
            self._facts_from_concrete_spec(reusable_spec, possible)

+        self.gen.h1("Possible flags on nodes")
+        for flag in spack.spec.FlagMap.valid_compiler_flags():
+            self.gen.fact(fn.flag_type(flag))
+        self.gen.newline()
+
        self.gen.h1("General Constraints")
-        self.available_compilers()
-        self.compiler_defaults()
-        self.compiler_supports_os()
+        self.compiler_facts()

        # architecture defaults
        self.platform_defaults()

@@ -2105,7 +2115,6 @@ def setup(self, driver, specs, reuse=None):
        self.provider_defaults()
        self.provider_requirements()
        self.external_packages()
-        self.flag_defaults()

        self.gen.h1("Package Constraints")
        for pkg in sorted(self.pkgs):
@@ -2176,6 +2185,7 @@ def __init__(self, specs, hash_lookup=None):
|
||||
self._specs = {}
|
||||
self._result = None
|
||||
self._command_line_specs = specs
|
||||
self._hash_specs = []
|
||||
self._flag_sources = collections.defaultdict(lambda: set())
|
||||
self._flag_compiler_defaults = set()
|
||||
|
||||
@@ -2186,6 +2196,7 @@ def __init__(self, specs, hash_lookup=None):
|
||||
def hash(self, pkg, h):
|
||||
if pkg not in self._specs:
|
||||
self._specs[pkg] = self._hash_lookup[h]
|
||||
self._hash_specs.append(pkg)
|
||||
|
||||
def node(self, pkg):
|
||||
if pkg not in self._specs:
|
||||
@@ -2284,10 +2295,11 @@ def reorder_flags(self):
|
||||
flags will appear last on the compile line, in the order they
|
||||
were specified.
|
||||
|
||||
The solver determines wihch flags are on nodes; this routine
|
||||
The solver determines which flags are on nodes; this routine
|
||||
imposes order afterwards.
|
||||
"""
|
||||
compilers = dict((c.spec, c) for c in all_compilers_in_config())
|
||||
# reverse compilers so we get highest priority compilers that share a spec
|
||||
compilers = dict((c.spec, c) for c in reversed(all_compilers_in_config()))
|
||||
cmd_specs = dict((s.name, s) for spec in self._command_line_specs for s in spec.traverse())
|
||||
|
||||
for spec in self._specs.values():
|
||||
@@ -2310,8 +2322,8 @@ def reorder_flags(self):
|
||||
)
|
||||
|
||||
# add flags from each source, lowest to highest precedence
|
||||
for source_name in sorted_sources:
|
||||
source = cmd_specs[source_name]
|
||||
for name in sorted_sources:
|
||||
source = self._specs[name] if name in self._hash_specs else cmd_specs[name]
|
||||
extend_flag_list(from_sources, source.compiler_flags.get(flag_type, []))
|
||||
|
||||
# compiler flags from compilers config are lowest precedence
|
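The reversed() in the hunk above is load-bearing: in a dict built from a sequence, later assignments win on key collisions, so iterating the reversed list leaves the first-listed (highest-priority) compiler entry in the mapping. A standalone illustration:

# Later assignments win in a dict, so building it from the reversed list
# makes the first (highest-priority) entry survive a key collision.
entries = [
    ("gcc@9.4.0", "entry-from-top-of-compilers.yaml"),
    ("gcc@9.4.0", "entry-from-bottom"),
]

assert dict(entries)["gcc@9.4.0"] == "entry-from-bottom"
assert dict(reversed(entries))["gcc@9.4.0"] == "entry-from-top-of-compilers.yaml"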
@@ -2386,10 +2398,12 @@ def build_specs(self, function_tuples):
                continue

            # if we've already gotten a concrete spec for this pkg,
            # do not bother calling actions on it.
            # do not bother calling actions on it except for node_flag_source,
            # since node_flag_source is tracking information not in the spec itself
            spec = self._specs.get(pkg)
            if spec and spec.concrete:
                continue
                if name != "node_flag_source":
                    continue

            action(*args)

@@ -2489,7 +2503,7 @@ def _check_input_and_extract_concrete_specs(specs):
                spack.spec.Spec.ensure_valid_variants(s)
        return reusable

    def _reusable_specs(self):
    def _reusable_specs(self, specs):
        reusable_specs = []
        if self.reuse:
            # Specs from the local Database
@@ -2511,6 +2525,13 @@ def _reusable_specs(self):
                    # TODO: update mirror configuration so it can indicate that the
                    # TODO: source cache (or any mirror really) doesn't have binaries.
                    pass

        # If we only want to reuse dependencies, remove the root specs
        if self.reuse == "dependencies":
            reusable_specs = [
                spec for spec in reusable_specs if not any(root in spec for root in specs)
            ]

        return reusable_specs
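The new specs argument threaded into _reusable_specs exists for the filter at the end of this hunk: with reuse == "dependencies", any candidate whose DAG contains one of the input roots is dropped, so roots are always re-concretized while their dependencies stay reusable. A small model of that filter, with a plain membership function standing in for Spack's Spec.__contains__:

# Sketch of the root filter; `contains(candidate, root)` stands in for
# `root in candidate`, i.e. "some node of the candidate satisfies root".
def reusable_for_dependencies(candidates, roots, contains):
    return [c for c in candidates if not any(contains(c, root) for root in roots)]

# usage: mpileaks is an input root, so a stored mpileaks DAG is filtered out,
# while an unrelated mpich install remains reusable
dags = {"mpileaks": {"mpileaks", "mpich"}, "mpich": {"mpich"}}
contains = lambda candidate, root: root in dags[candidate]
assert reusable_for_dependencies(["mpileaks", "mpich"], ["mpileaks"], contains) == ["mpich"]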

    def solve(self, specs, out=None, timers=False, stats=False, tests=False, setup_only=False):
@@ -2527,7 +2548,7 @@ def solve(self, specs, out=None, timers=False, stats=False, tests=False, setup_o
        """
        # Check upfront that the variants are admissible
        reusable_specs = self._check_input_and_extract_concrete_specs(specs)
        reusable_specs.extend(self._reusable_specs())
        reusable_specs.extend(self._reusable_specs(specs))
        setup = SpackSolverSetup(tests=tests)
        output = OutputConfiguration(timers=timers, stats=stats, out=out, setup_only=setup_only)
        result, _, _ = self.driver.solve(setup, specs, reuse=reusable_specs, output=output)
@@ -2550,7 +2571,7 @@ def solve_in_rounds(self, specs, out=None, timers=False, stats=False, tests=Fals
            tests (bool): add test dependencies to the solve
        """
        reusable_specs = self._check_input_and_extract_concrete_specs(specs)
        reusable_specs.extend(self._reusable_specs())
        reusable_specs.extend(self._reusable_specs(specs))
        setup = SpackSolverSetup(tests=tests)

        # Tell clingo that we don't have to solve all the inputs at once

@@ -522,7 +522,7 @@ error(2, "{0} and {1} cannot both propagate variant '{2}' to package {3} with va
    attr("variant_propagate", Package, Variant, Value1, Source1),
    attr("variant_propagate", Package, Variant, Value2, Source2),
    variant(Package, Variant),
    Value1 != Value2.
    Value1 < Value2.

% a variant cannot be set if it is not a variant on the package
error(2, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Variant, Package)
@@ -816,25 +816,15 @@ node_target_compatible(Package, Target)
  :- attr("node_target", Package, MyTarget),
     target_compatible(Target, MyTarget).

% target_compatible(T1, T2) means code for T2 can run on T1
% This order is dependent -> dependency in the node DAG, which
% is contravariant with the target DAG.
target_compatible(Target, Target) :- target(Target).
target_compatible(Child, Parent) :- target_parent(Child, Parent).
target_compatible(Descendent, Ancestor)
  :- target_parent(Target, Ancestor),
     target_compatible(Descendent, Target),
     target(Target).

#defined target_satisfies/2.
#defined target_parent/2.

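A Python rendering of the recursive target_compatible rules above may help: code built for a target runs on that target and on any of its descendants. The sketch assumes a single-parent chain, whereas archspec's real microarchitecture graph is a DAG:

# target_compatible(node, target): code compiled for `target` can run on
# `node` iff target is node itself or one of node's ancestors.
def target_compatible(node, target, parent_of):
    while node is not None:
        if node == target:
            return True
        node = parent_of.get(node)
    return False

parent_of = {"haswell": "x86_64_v2", "x86_64_v2": "x86_64"}
assert target_compatible("haswell", "x86_64", parent_of)      # generic code runs on haswell
assert not target_compatible("x86_64", "haswell", parent_of)  # but not the other way around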
% can't use targets on node if the compiler for the node doesn't support them
error(2, "{0} compiler '{2}@{3}' incompatible with 'target={1}'", Package, Target, Compiler, Version)
  :- attr("node_target", Package, Target),
     not compiler_supports_target(Compiler, Version, Target),
     attr("node_compiler", Package, Compiler),
     attr("node_compiler_version", Package, Compiler, Version),
     node_compiler(Package, CompilerID),
     not compiler_supports_target(CompilerID, Target),
     compiler_name(CompilerID, Compiler),
     compiler_version(CompilerID, Version),
     build(Package).

% if a target is set explicitly, respect it
@@ -868,32 +858,44 @@ error(2, "'{0} target={1}' is not compatible with this machine", Package, Target
%-----------------------------------------------------------------------------
% Compiler semantics
%-----------------------------------------------------------------------------
compiler(Compiler) :- compiler_version(Compiler, _).

% There must be only one compiler set per built node. The compiler
% is chosen among available versions.
{ attr("node_compiler_version", Package, Compiler, Version) : compiler_version(Compiler, Version) } :-
% There must be only one compiler set per built node.
{ node_compiler(Package, CompilerID) : compiler_id(CompilerID) } :-
  attr("node", Package),
  build(Package).

% Infer the compiler that matches a reused node
node_compiler(Package, CompilerID)
  :- attr("node_compiler_version", Package, CompilerName, CompilerVersion),
     attr("node", Package),
     compiler_name(CompilerID, CompilerName),
     compiler_version(CompilerID, CompilerVersion),
     concrete(Package).

% Expand the internal attribute into "attr("node_compiler_version")
attr("node_compiler_version", Package, CompilerName, CompilerVersion)
  :- node_compiler(Package, CompilerID),
     compiler_name(CompilerID, CompilerName),
     compiler_version(CompilerID, CompilerVersion),
     build(Package).

attr("node_compiler", Package, CompilerName)
  :- attr("node_compiler_version", Package, CompilerName, CompilerVersion).

error(2, "No valid compiler version found for '{0}'", Package)
  :- attr("node", Package),
     C = #count{ Version : attr("node_compiler_version", Package, _, Version)},
     C < 1.
error(2, "'{0}' compiler constraints '%{1}@{2}' and '%{3}@{4}' are incompatible", Package, Compiler1, Version1, Compiler2, Version2)
  :- attr("node", Package),
     attr("node_compiler_version", Package, Compiler1, Version1),
     attr("node_compiler_version", Package, Compiler2, Version2),
     (Compiler1, Version1) < (Compiler2, Version2). % see[1]
     not node_compiler(Package, _).

% Sometimes we just need to know the compiler and not the version
attr("node_compiler", Package, Compiler) :- attr("node_compiler_version", Package, Compiler, _).
error(2, "Cannot concretize {0} with two compilers {1}@{2} and {3}@{4}", Package, C1, V1, C2, V2)
  :- attr("node", Package),
     attr("node_compiler_version", Package, C1, V1),
     attr("node_compiler_version", Package, C2, V2),
     (C1, V1) < (C2, V2). % see[1]

% We can't have a compiler be enforced and select the version from another compiler
error(2, "Cannot concretize {0} with two compilers {1}@{2} and {3}@{4}", Package, C1, V1, C2, V2)
  :- attr("node_compiler_version", Package, C1, V1),
     attr("node_compiler_version", Package, C2, V2),
     (C1, V1) != (C2, V2).
     (C1, V1) < (C2, V2).

error(2, "Cannot concretize {0} with two compilers {1} and {2}@{3}", Package, Compiler1, Compiler2, Version)
  :- attr("node_compiler", Package, Compiler1),
@@ -904,37 +906,47 @@ error(2, "Cannot concretize {0} with two compilers {1} and {2}@{3}", Package, Co
error(1, "No valid compiler for {0} satisfies '%{1}'", Package, Compiler)
  :- attr("node", Package),
     attr("node_compiler_version_satisfies", Package, Compiler, ":"),
     C = #count{ Version : attr("node_compiler_version", Package, Compiler, Version), compiler_version_satisfies(Compiler, ":", Version) },
     C < 1.
     not compiler_version_satisfies(Compiler, ":", _).

% If the compiler of a node must satisfy a constraint, then its version
% must be chosen among the ones that satisfy said constraint
error(2, "No valid version for '{0}' compiler '{1}' satisfies '@{2}'", Package, Compiler, Constraint)
  :- attr("node", Package),
     attr("node_compiler_version_satisfies", Package, Compiler, Constraint),
     C = #count{ Version : attr("node_compiler_version", Package, Compiler, Version), compiler_version_satisfies(Compiler, Constraint, Version) },
     C < 1.
     not compiler_version_satisfies(Compiler, Constraint, _).

error(2, "No valid version for '{0}' compiler '{1}' satisfies '@{2}'", Package, Compiler, Constraint)
  :- attr("node", Package),
     attr("node_compiler_version_satisfies", Package, Compiler, Constraint),
     not compiler_version_satisfies(Compiler, Constraint, ID),
     node_compiler(Package, ID).

% If the node is associated with a compiler and the compiler satisfy a constraint, then
% the compiler associated with the node satisfy the same constraint
attr("node_compiler_version_satisfies", Package, Compiler, Constraint)
  :- attr("node_compiler_version", Package, Compiler, Version),
     compiler_version_satisfies(Compiler, Constraint, Version).
  :- node_compiler(Package, CompilerID),
     compiler_name(CompilerID, Compiler),
     compiler_version_satisfies(Compiler, Constraint, CompilerID).

#defined compiler_version_satisfies/3.

% If the compiler version was set from the command line,
% respect it verbatim
attr("node_compiler_version", Package, Compiler, Version) :-
  attr("node_compiler_version_set", Package, Compiler, Version).
:- attr("node_compiler_version_set", Package, Compiler, Version),
   not attr("node_compiler_version", Package, Compiler, Version).

:- attr("node_compiler_set", Package, Compiler),
   not attr("node_compiler_version", Package, Compiler, _).

% Cannot select a compiler if it is not supported on the OS
% Compilers that are explicitly marked as allowed
% are excluded from this check
error(2, "{0} compiler '%{1}@{2}' incompatible with 'os={3}'", Package, Compiler, Version, OS)
  :- attr("node_compiler_version", Package, Compiler, Version),
     attr("node_os", Package, OS),
     not compiler_supports_os(Compiler, Version, OS),
  :- attr("node_os", Package, OS),
     node_compiler(Package, CompilerID),
     compiler_name(CompilerID, Compiler),
     compiler_version(CompilerID, Version),
     not compiler_os(CompilerID, OS),
     not allow_compiler(Compiler, Version),
     build(Package).

@@ -942,8 +954,8 @@ error(2, "{0} compiler '%{1}@{2}' incompatible with 'os={3}'", Package, Compiler
% same compiler there's a mismatch.
compiler_match(Package, Dependency)
  :- depends_on(Package, Dependency),
     attr("node_compiler_version", Package, Compiler, Version),
     attr("node_compiler_version", Dependency, Compiler, Version).
     node_compiler(Package, CompilerID),
     node_compiler(Dependency, CompilerID).

compiler_mismatch(Package, Dependency)
  :- depends_on(Package, Dependency),
@@ -955,25 +967,32 @@ compiler_mismatch_required(Package, Dependency)
     attr("node_compiler_set", Dependency, _),
     not compiler_match(Package, Dependency).

#defined compiler_supports_os/3.
#defined compiler_os/3.
#defined allow_compiler/2.

% compilers weighted by preference according to packages.yaml
compiler_weight(Package, Weight)
  :- attr("node_compiler_version", Package, Compiler, V),
  :- node_compiler(Package, CompilerID),
     compiler_name(CompilerID, Compiler),
     compiler_version(CompilerID, V),
     node_compiler_preference(Package, Compiler, V, Weight).
compiler_weight(Package, Weight)
  :- attr("node_compiler_version", Package, Compiler, V),
  :- node_compiler(Package, CompilerID),
     compiler_name(CompilerID, Compiler),
     compiler_version(CompilerID, V),
     not node_compiler_preference(Package, Compiler, V, _),
     default_compiler_preference(Compiler, V, Weight).
     default_compiler_preference(CompilerID, Weight).
compiler_weight(Package, 100)
  :- attr("node_compiler_version", Package, Compiler, Version),
     not node_compiler_preference(Package, Compiler, Version, _),
     not default_compiler_preference(Compiler, Version, _).
  :- node_compiler(Package, CompilerID),
     compiler_name(CompilerID, Compiler),
     compiler_version(CompilerID, V),
     not node_compiler_preference(Package, Compiler, V, _),
     not default_compiler_preference(CompilerID, _).

% For the time being, be strict and reuse only if the compiler match one we have on the system
error(2, "Compiler {1}@{2} requested for {0} cannot be found. Set install_missing_compilers:true if intended.", Package, Compiler, Version)
  :- attr("node_compiler_version", Package, Compiler, Version), not compiler_version(Compiler, Version).
  :- attr("node_compiler_version", Package, Compiler, Version),
     not node_compiler(Package, _).

#defined node_compiler_preference/4.
#defined default_compiler_preference/3.
@@ -985,10 +1004,11 @@ error(2, "Compiler {1}@{2} requested for {0} cannot be found. Set install_missin
% propagate flags when compiler match
can_inherit_flags(Package, Dependency, FlagType)
  :- depends_on(Package, Dependency),
     attr("node_compiler", Package, Compiler),
     attr("node_compiler", Dependency, Compiler),
     node_compiler(Package, CompilerID),
     node_compiler(Dependency, CompilerID),
     not attr("node_flag_set", Dependency, FlagType, _),
     compiler(Compiler), flag_type(FlagType).
     compiler_id(CompilerID),
     flag_type(FlagType).

node_flag_inherited(Dependency, FlagType, Flag)
  :- attr("node_flag_set", Package, FlagType, Flag), can_inherit_flags(Package, Dependency, FlagType),
@@ -1005,7 +1025,7 @@ error(2, "{0} and {1} cannot both propagate compiler flags '{2}' to {3}", Source
     attr("node_flag_propagate", Source2, FlagType),
     can_inherit_flags(Source1, Package, FlagType),
     can_inherit_flags(Source2, Package, FlagType),
     Source1 != Source2.
     Source1 < Source2.

% remember where flags came from
attr("node_flag_source", Package, FlagType, Package) :- attr("node_flag_set", Package, FlagType, _).
@@ -1015,19 +1035,21 @@ attr("node_flag_source", Dependency, FlagType, Q)

% compiler flags from compilers.yaml are put on nodes if compiler matches
attr("node_flag", Package, FlagType, Flag)
  :- compiler_version_flag(Compiler, Version, FlagType, Flag),
     attr("node_compiler_version", Package, Compiler, Version),
  :- compiler_flag(CompilerID, FlagType, Flag),
     node_compiler(Package, CompilerID),
     flag_type(FlagType),
     compiler(Compiler),
     compiler_version(Compiler, Version).
     compiler_id(CompilerID),
     compiler_name(CompilerID, CompilerName),
     compiler_version(CompilerID, Version).

attr("node_flag_compiler_default", Package)
  :- not attr("node_flag_set", Package, FlagType, _),
     compiler_version_flag(Compiler, Version, FlagType, Flag),
     attr("node_compiler_version", Package, Compiler, Version),
     compiler_flag(CompilerID, FlagType, Flag),
     node_compiler(Package, CompilerID),
     flag_type(FlagType),
     compiler(Compiler),
     compiler_version(Compiler, Version).
     compiler_id(CompilerID),
     compiler_name(CompilerID, CompilerName),
     compiler_version(CompilerID, Version).

% if a flag is set to something or inherited, it's included
attr("node_flag", Package, FlagType, Flag) :- attr("node_flag_set", Package, FlagType, Flag).
@@ -1038,7 +1060,7 @@ attr("node_flag", Package, FlagType, Flag)
attr("no_flags", Package, FlagType)
  :- not attr("node_flag", Package, FlagType, _), attr("node", Package), flag_type(FlagType).

#defined compiler_version_flag/4.
#defined compiler_flag/3.


%-----------------------------------------------------------------------------
@@ -1054,7 +1076,7 @@ attr("no_flags", Package, FlagType)
% You can't install a hash, if it is not installed
:- attr("hash", Package, Hash), not installed_hash(Package, Hash).
% This should be redundant given the constraint above
:- attr("hash", Package, Hash1), attr("hash", Package, Hash2), Hash1 != Hash2.
:- attr("hash", Package, Hash1), attr("hash", Package, Hash2), Hash1 < Hash2.

% if a hash is selected, we impose all the constraints that implies
impose(Hash) :- attr("hash", Package, Hash).
@@ -1311,13 +1333,29 @@ opt_criterion(5, "non-preferred targets").
%-----------------
% Domain heuristic
%-----------------
#heuristic attr("version", Package, Version) : version_declared(Package, Version, 0), attr("node", Package). [10, true]
#heuristic version_weight(Package, 0) : version_declared(Package, Version, 0), attr("node", Package). [10, true]
#heuristic attr("node_target", Package, Target) : package_target_weight(Target, Package, 0), attr("node", Package). [10, true]
#heuristic node_target_weight(Package, 0) : attr("node", Package). [10, true]
#heuristic literal_solved(ID) : literal(ID). [1, sign]
#heuristic literal_solved(ID) : literal(ID). [50, init]
#heuristic attr("hash", Package, Hash) : attr("root", Package). [45, init]

#heuristic attr("version", Package, Version) : version_declared(Package, Version, 0), attr("root", Package). [40, true]
#heuristic version_weight(Package, 0) : version_declared(Package, Version, 0), attr("root", Package). [40, true]
#heuristic attr("variant_value", Package, Variant, Value) : variant_default_value(Package, Variant, Value), attr("root", Package). [40, true]
#heuristic attr("node_target", Package, Target) : package_target_weight(Target, Package, 0), attr("root", Package). [40, true]
#heuristic node_target_weight(Package, 0) : attr("root", Package). [40, true]
#heuristic node_compiler(Package, CompilerID) : default_compiler_preference(ID, 0), compiler_id(ID), attr("root", Package). [40, true]

#heuristic provider(Package, Virtual) : possible_provider_weight(Package, Virtual, 0, _), attr("virtual_node", Virtual). [30, true]
#heuristic provider_weight(Package, Virtual, 0, R) : possible_provider_weight(Package, Virtual, 0, R), attr("virtual_node", Virtual). [30, true]
#heuristic attr("node", Package) : possible_provider_weight(Package, Virtual, 0, _), attr("virtual_node", Virtual). [30, true]

#heuristic attr("version", Package, Version) : version_declared(Package, Version, 0), attr("node", Package). [20, true]
#heuristic version_weight(Package, 0) : version_declared(Package, Version, 0), attr("node", Package). [20, true]

#heuristic attr("node_target", Package, Target) : package_target_weight(Target, Package, 0), attr("node", Package). [20, true]
#heuristic node_target_weight(Package, 0) : attr("node", Package). [20, true]
#heuristic node_compiler(Package, CompilerID) : default_compiler_preference(ID, 0), compiler_id(ID), attr("node", Package). [15, true]

#heuristic attr("variant_value", Package, Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", Package). [10, true]
#heuristic provider(Package, Virtual) : possible_provider_weight(Package, Virtual, 0, _), attr("virtual_node", Virtual). [10, true]
#heuristic attr("node", Package) : possible_provider_weight(Package, Virtual, 0, _), attr("virtual_node", Virtual). [10, true]
#heuristic attr("node_os", Package, OS) : buildable_os(OS). [10, true]

%-----------

@@ -54,7 +54,6 @@
import itertools
import os
import re
import sys
import warnings
from typing import Tuple

@@ -118,7 +117,6 @@
    "SpecDeprecatedError",
]

is_windows = sys.platform == "win32"
#: Valid pattern for an identifier in Spack

identifier_re = r"\w[\w-]*"
@@ -193,9 +191,7 @@ def __call__(self, match):

@lang.lazy_lexicographic_ordering
class ArchSpec(object):
    """Aggregate the target platform, the operating system and the target
    microarchitecture into an architecture spec..
    """
    """Aggregate the target platform, the operating system and the target microarchitecture."""

    @staticmethod
    def _return_arch(os_tag, target_tag):
@@ -364,17 +360,11 @@ def target_or_none(t):

        self._target = value

    def satisfies(self, other, strict=False):
        """Predicate to check if this spec satisfies a constraint.
    def satisfies(self, other: "ArchSpec") -> bool:
        """Return True if all concrete specs matching self also match other, otherwise False.

        Args:
            other (ArchSpec or str): constraint on the current instance
            strict (bool): if ``False`` the function checks if the current
                instance *might* eventually satisfy the constraint. If
                ``True`` it check if the constraint is satisfied right now.

        Returns:
            True if the constraint is satisfied, False otherwise.
            other: spec to be satisfied
        """
        other = self._autospec(other)

@@ -382,47 +372,69 @@ def satisfies(self, other, strict=False):
        for attribute in ("platform", "os"):
            other_attribute = getattr(other, attribute)
            self_attribute = getattr(self, attribute)
            if strict or self.concrete:
                if other_attribute and self_attribute != other_attribute:
                    return False
            else:
                if other_attribute and self_attribute and self_attribute != other_attribute:
                    return False
            if other_attribute and self_attribute != other_attribute:
                return False

        # Check target
        return self.target_satisfies(other, strict=strict)
        return self._target_satisfies(other, strict=True)

    def target_satisfies(self, other, strict):
        need_to_check = (
            bool(other.target) if strict or self.concrete else bool(other.target and self.target)
        )
    def intersects(self, other: "ArchSpec") -> bool:
        """Return True if there exists at least one concrete spec that matches both
        self and other, otherwise False.

        This operation is commutative, and if two specs intersect it means that one
        can constrain the other.

        Args:
            other: spec to be checked for compatibility
        """
        other = self._autospec(other)

        # Check platform and os
        for attribute in ("platform", "os"):
            other_attribute = getattr(other, attribute)
            self_attribute = getattr(self, attribute)
            if other_attribute and self_attribute and self_attribute != other_attribute:
                return False

        return self._target_satisfies(other, strict=False)

    def _target_satisfies(self, other: "ArchSpec", strict: bool) -> bool:
        if strict is True:
            need_to_check = bool(other.target)
        else:
            need_to_check = bool(other.target and self.target)

        # If there's no need to check we are fine
        if not need_to_check:
            return True

        # self is not concrete, but other_target is there and strict=True
        # other_target is there and strict=True
        if self.target is None:
            return False

        return bool(self.target_intersection(other))
        return bool(self._target_intersection(other))
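The split above is the heart of this change: satisfies becomes a one-way subset test while intersects is a commutative compatibility test. A toy model of the platform/os logic, with plain dicts in place of Spack's classes:

# satisfies: every field the constraint sets must be set and equal on self.
# intersects: a field left unset on either side never rules out a match.
def arch_satisfies(self_fields, other_fields):
    return all(self_fields.get(k) == v for k, v in other_fields.items() if v)

def arch_intersects(self_fields, other_fields):
    return all(
        self_fields[k] == other_fields[k]
        for k in self_fields.keys() & other_fields.keys()
        if self_fields[k] and other_fields[k]
    )

lhs = {"platform": "linux", "os": None}
rhs = {"platform": "linux", "os": "ubuntu20.04"}
assert arch_intersects(lhs, rhs)     # the unset os leaves room for agreement
assert not arch_satisfies(lhs, rhs)  # but lhs is not a subset of rhs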
    def target_constrain(self, other):
        if not other.target_satisfies(self, strict=False):
    def _target_constrain(self, other: "ArchSpec") -> bool:
        if not other._target_satisfies(self, strict=False):
            raise UnsatisfiableArchitectureSpecError(self, other)

        if self.target_concrete:
            return False

        elif other.target_concrete:
            self.target = other.target
            return True

        # Compute the intersection of every combination of ranges in the lists
        results = self.target_intersection(other)
        # Do we need to dedupe here?
        self.target = ",".join(results)
        results = self._target_intersection(other)
        attribute_str = ",".join(results)

    def target_intersection(self, other):
        if self.target == attribute_str:
            return False

        self.target = attribute_str
        return True

    def _target_intersection(self, other):
        results = []

        if not self.target or not other.target:
@@ -466,7 +478,7 @@ def target_intersection(self, other):
            results.append("%s:%s" % (n_min, n_max))
        return results

    def constrain(self, other):
    def constrain(self, other: "ArchSpec") -> bool:
        """Projects all architecture fields that are specified in the given
        spec onto the instance spec if they're missing from the instance
        spec.
@@ -481,7 +493,7 @@ def constrain(self, other):
        """
        other = self._autospec(other)

        if not other.satisfies(self):
        if not other.intersects(self):
            raise UnsatisfiableArchitectureSpecError(other, self)

        constrained = False
@@ -491,7 +503,7 @@ def constrain(self, other):
                setattr(self, attr, ovalue)
                constrained = True

        self.target_constrain(other)
        constrained |= self._target_constrain(other)

        return constrained

@@ -507,7 +519,9 @@ def concrete(self):
    @property
    def target_concrete(self):
        """True if the target is not a range or list."""
        return ":" not in str(self.target) and "," not in str(self.target)
        return (
            self.target is not None and ":" not in str(self.target) and "," not in str(self.target)
        )

    def to_dict(self):
        d = syaml.syaml_dict(
@@ -593,11 +607,31 @@ def _autospec(self, compiler_spec_like):
            return compiler_spec_like
        return CompilerSpec(compiler_spec_like)

    def satisfies(self, other, strict=False):
        other = self._autospec(other)
        return self.name == other.name and self.versions.satisfies(other.versions, strict=strict)
    def intersects(self, other: "CompilerSpec") -> bool:
        """Return True if all concrete specs matching self also match other, otherwise False.

    def constrain(self, other):
        For compiler specs this means that the name of the compiler must be the same for
        self and other, and that the versions ranges should intersect.

        Args:
            other: spec to be satisfied
        """
        other = self._autospec(other)
        return self.name == other.name and self.versions.intersects(other.versions)

    def satisfies(self, other: "CompilerSpec") -> bool:
        """Return True if all concrete specs matching self also match other, otherwise False.

        For compiler specs this means that the name of the compiler must be the same for
        self and other, and that the version range of self is a subset of that of other.

        Args:
            other: spec to be satisfied
        """
        other = self._autospec(other)
        return self.name == other.name and self.versions.satisfies(other.versions)

    def constrain(self, other: "CompilerSpec") -> bool:
        """Intersect self's versions with other.

        Return whether the CompilerSpec changed.
@@ -605,7 +639,7 @@ def constrain(self, other):
        other = self._autospec(other)

        # ensure that other will actually constrain this spec.
        if not other.satisfies(self):
        if not other.intersects(self):
            raise UnsatisfiableCompilerSpecError(other, self)

        return self.versions.intersect(other.versions)
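For compiler specs the two predicates reduce to checks on version ranges. Modeling versions as closed integer ranges (Spack's real VersionList is richer) makes the asymmetry concrete:

# intersects: the ranges overlap somewhere; satisfies: lhs is a subset of rhs.
def ranges_intersect(a, b):
    return a[0] <= b[1] and b[0] <= a[1]

def range_satisfies(a, b):
    return b[0] <= a[0] and a[1] <= b[1]

gcc_4_to_6, gcc_5 = (4, 6), (5, 5)
assert ranges_intersect(gcc_4_to_6, gcc_5)     # some version matches both
assert range_satisfies(gcc_5, gcc_4_to_6)      # every gcc@5 is a gcc@4:6
assert not range_satisfies(gcc_4_to_6, gcc_5)  # gcc@4 matches lhs but not rhs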
@@ -738,24 +772,25 @@ def __init__(self, spec):
        super(FlagMap, self).__init__()
        self.spec = spec

    def satisfies(self, other, strict=False):
        if strict or (self.spec and self.spec._concrete):
            return all(f in self and set(self[f]) == set(other[f]) for f in other)
        else:
            if not all(
                set(self[f]) == set(other[f]) for f in other if (other[f] != [] and f in self)
            ):
    def satisfies(self, other):
        return all(f in self and self[f] == other[f] for f in other)

    def intersects(self, other):
        common_types = set(self) & set(other)
        for flag_type in common_types:
            if not self[flag_type] or not other[flag_type]:
                # At least one of the two is empty
                continue

            if self[flag_type] != other[flag_type]:
                return False

            # Check that the propagation values match
            for flag_type in other:
                if not all(
                    other[flag_type][i].propagate == self[flag_type][i].propagate
                    for i in range(len(other[flag_type]))
                    if flag_type in self
                ):
                    return False
            return True
            if not all(
                f1.propagate == f2.propagate for f1, f2 in zip(self[flag_type], other[flag_type])
            ):
                # At least one propagation flag didn't match
                return False
        return True
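A condensed model of the new FlagMap rules, leaving out the propagation comparison the real intersects also performs: flag lists of a shared type must match exactly, except that an empty list keeps a spec open under intersects:

def flagmaps_intersect(a, b):
    # an empty list on either side means "still unconstrained", never a conflict
    return all(a[t] == b[t] for t in a.keys() & b.keys() if a[t] and b[t])

def flagmap_satisfies(a, b):
    # every flag type the constraint mentions must be present and equal
    return all(t in a and a[t] == b[t] for t in b)

lhs = {"cflags": ["-O2"], "ldflags": []}
rhs = {"cflags": ["-O2"], "ldflags": ["-lm"]}
assert flagmaps_intersect(lhs, rhs)     # empty ldflags could still become ["-lm"]
assert not flagmap_satisfies(lhs, rhs)  # satisfies demands an exact ldflags match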
    def constrain(self, other):
        """Add all flags in other that aren't in self to self.
@@ -2613,9 +2648,9 @@ def _old_concretize(self, tests=False, deprecation_warning=True):
                # it's possible to build that configuration with Spack
                continue
            for conflict_spec, when_list in x.package_class.conflicts.items():
                if x.satisfies(conflict_spec, strict=True):
                if x.satisfies(conflict_spec):
                    for when_spec, msg in when_list:
                        if x.satisfies(when_spec, strict=True):
                        if x.satisfies(when_spec):
                            when = when_spec.copy()
                            when.name = x.name
                            matches.append((x, conflict_spec, when, msg))
@@ -2667,7 +2702,7 @@ def inject_patches_variant(root):
            # Add any patches from the package to the spec.
            patches = []
            for cond, patch_list in s.package_class.patches.items():
                if s.satisfies(cond, strict=True):
                if s.satisfies(cond):
                    for patch in patch_list:
                        patches.append(patch)
            if patches:
@@ -2685,7 +2720,7 @@ def inject_patches_variant(root):
            patches = []
            for cond, dependency in pkg_deps[dspec.spec.name].items():
                for pcond, patch_list in dependency.patches.items():
                    if dspec.parent.satisfies(cond, strict=True) and dspec.spec.satisfies(pcond):
                    if dspec.parent.satisfies(cond) and dspec.spec.satisfies(pcond):
                        patches.extend(patch_list)
            if patches:
                all_patches = spec_to_patches.setdefault(id(dspec.spec), [])
@@ -2943,7 +2978,7 @@ def _evaluate_dependency_conditions(self, name):
        # evaluate when specs to figure out constraints on the dependency.
        dep = None
        for when_spec, dependency in conditions.items():
            if self.satisfies(when_spec, strict=True):
            if self.satisfies(when_spec):
                if dep is None:
                    dep = dp.Dependency(self.name, Spec(name), type=())
                try:
@@ -2978,7 +3013,7 @@ def _find_provider(self, vdep, provider_index):
            # result.
            for provider in providers:
                for spec in providers:
                    if spec is not provider and provider.satisfies(spec):
                    if spec is not provider and provider.intersects(spec):
                        providers.remove(spec)
            # Can't have multiple providers for the same thing in one spec.
            if len(providers) > 1:
@@ -3295,9 +3330,15 @@ def update_variant_validate(self, variant_name, values):
        pkg_variant.validate_or_raise(self.variants[variant_name], pkg_cls)

    def constrain(self, other, deps=True):
        """Merge the constraints of other with self.
        """Intersect self with other in-place. Return True if self changed, False otherwise.

        Returns True if the spec changed as a result, False if not.
        Args:
            other: constraint to be added to self
            deps: if False, constrain only the root node, otherwise constrain dependencies
                as well.

        Raises:
            spack.error.UnsatisfiableSpecError: when self cannot be constrained
        """
        # If we are trying to constrain a concrete spec, either the spec
        # already satisfies the constraint (and the method returns False)
@@ -3377,6 +3418,9 @@ def constrain(self, other, deps=True):
        if deps:
            changed |= self._constrain_dependencies(other)

        if other.concrete and not self.concrete and other.satisfies(self):
            self._finalize_concretization()

        return changed

    def _constrain_dependencies(self, other):
@@ -3389,7 +3433,7 @@ def _constrain_dependencies(self, other):
        # TODO: might want more detail than this, e.g. specific deps
        # in violation. if this becomes a priority get rid of this
        # check and be more specific about what's wrong.
        if not other.satisfies_dependencies(self):
        if not other._intersects_dependencies(self):
            raise UnsatisfiableDependencySpecError(other, self)

        if any(not d.name for d in other.traverse(root=False)):
@@ -3451,58 +3495,49 @@ def _autospec(self, spec_like):
            return spec_like
        return Spec(spec_like)

    def satisfies(self, other, deps=True, strict=False):
        """Determine if this spec satisfies all constraints of another.
    def intersects(self, other: "Spec", deps: bool = True) -> bool:
        """Return True if there exists at least one concrete spec that matches both
        self and other, otherwise False.

        There are two senses for satisfies, depending on the ``strict``
        argument.
        This operation is commutative, and if two specs intersect it means that one
        can constrain the other.

        * ``strict=False``: the left-hand side and right-hand side have
          non-empty intersection. For example ``zlib`` satisfies
          ``zlib@1.2.3`` and ``zlib@1.2.3`` satisfies ``zlib``. In this
          sense satisfies is a commutative operation: ``x.satisfies(y)``
          if and only if ``y.satisfies(x)``.

        * ``strict=True``: the left-hand side is a subset of the right-hand
          side. For example ``zlib@1.2.3`` satisfies ``zlib``, but ``zlib``
          does not satisfy ``zlib@1.2.3``. In this sense satisfies is not
          commutative: the left-hand side should be at least as constrained
          as the right-hand side.
        Args:
            other: spec to be checked for compatibility
            deps: if True check compatibility of dependency nodes too, if False only check root
        """

        other = self._autospec(other)

        # Optimizations for right-hand side concrete:
        # 1. For subset (strict=True) tests this means the left-hand side must
        # be the same singleton with identical hash. Notice that package hashes
        # can be different for otherwise indistinguishable concrete Spec objects.
        # 2. For non-empty intersection (strict=False) we only have a fast path
        # when the left-hand side is also concrete.
        if other.concrete:
            if strict:
                return self.concrete and self.dag_hash() == other.dag_hash()
            elif self.concrete:
                return self.dag_hash() == other.dag_hash()
        if other.concrete and self.concrete:
            return self.dag_hash() == other.dag_hash()

        # If the names are different, we need to consider virtuals
        if self.name != other.name and self.name and other.name:
            # A concrete provider can satisfy a virtual dependency.
            if not self.virtual and other.virtual:
            if self.virtual and other.virtual:
                # Two virtual specs intersect only if there are providers for both
                lhs = spack.repo.path.providers_for(str(self))
                rhs = spack.repo.path.providers_for(str(other))
                intersection = [s for s in lhs if any(s.intersects(z) for z in rhs)]
                return bool(intersection)

            # A provider can satisfy a virtual dependency.
            elif self.virtual or other.virtual:
                virtual_spec, non_virtual_spec = (self, other) if self.virtual else (other, self)
                try:
                    # Here we might get an abstract spec
                    pkg_cls = spack.repo.path.get_pkg_class(self.fullname)
                    pkg = pkg_cls(self)
                    pkg_cls = spack.repo.path.get_pkg_class(non_virtual_spec.fullname)
                    pkg = pkg_cls(non_virtual_spec)
                except spack.repo.UnknownEntityError:
                    # If we can't get package info on this spec, don't treat
                    # it as a provider of this vdep.
                    return False

                if pkg.provides(other.name):
                if pkg.provides(virtual_spec.name):
                    for provided, when_specs in pkg.provided.items():
                        if any(
                            self.satisfies(when, deps=False, strict=strict) for when in when_specs
                            non_virtual_spec.intersects(when, deps=False) for when in when_specs
                        ):
                            if provided.satisfies(other):
                            if provided.intersects(virtual_spec):
                                return True
                return False

@@ -3513,75 +3548,41 @@ def satisfies(self, other, deps=True, strict=False):
            and self.namespace != other.namespace
        ):
            return False

        if self.versions and other.versions:
            if not self.versions.satisfies(other.versions, strict=strict):
            if not self.versions.intersects(other.versions):
                return False
        elif strict and (self.versions or other.versions):
            return False

        # None indicates no constraints when not strict.
        if self.compiler and other.compiler:
            if not self.compiler.satisfies(other.compiler, strict=strict):
            if not self.compiler.intersects(other.compiler):
                return False
        elif strict and (other.compiler and not self.compiler):

        if not self.variants.intersects(other.variants):
            return False

        var_strict = strict
        if (not self.name) or (not other.name):
            var_strict = True
        if not self.variants.satisfies(other.variants, strict=var_strict):
            return False

        # Architecture satisfaction is currently just string equality.
        # If not strict, None means unconstrained.
        if self.architecture and other.architecture:
            if not self.architecture.satisfies(other.architecture, strict):
            if not self.architecture.intersects(other.architecture):
                return False
        elif strict and (other.architecture and not self.architecture):
            return False

        if not self.compiler_flags.satisfies(other.compiler_flags, strict=strict):
        if not self.compiler_flags.intersects(other.compiler_flags):
            return False

        # If we need to descend into dependencies, do it, otherwise we're done.
        if deps:
            deps_strict = strict
            if self._concrete and not other.name:
                # We're dealing with existing specs
                deps_strict = True
            return self.satisfies_dependencies(other, strict=deps_strict)
            return self._intersects_dependencies(other)
        else:
            return True

    def satisfies_dependencies(self, other, strict=False):
        """
        This checks constraints on common dependencies against each other.
        """
    def _intersects_dependencies(self, other):
        other = self._autospec(other)

        # If there are no constraints to satisfy, we're done.
        if not other._dependencies:
            return True

        if strict:
            # if we have no dependencies, we can't satisfy any constraints.
            if not self._dependencies:
                return False

            # use list to prevent double-iteration
            selfdeps = list(self.traverse(root=False))
            otherdeps = list(other.traverse(root=False))
            if not all(any(d.satisfies(dep, strict=True) for d in selfdeps) for dep in otherdeps):
                return False

        elif not self._dependencies:
            # if not strict, this spec *could* eventually satisfy the
            # constraints on other.
        if not other._dependencies or not self._dependencies:
            # one spec *could* eventually satisfy the other
            return True

        # Handle first-order constraints directly
        for name in self.common_dependencies(other):
            if not self[name].satisfies(other[name], deps=False):
            if not self[name].intersects(other[name], deps=False):
                return False

        # For virtual dependencies, we need to dig a little deeper.
@@ -3609,6 +3610,89 @@ def satisfies_dependencies(self, other, strict=False):

        return True

    def satisfies(self, other: "Spec", deps: bool = True) -> bool:
        """Return True if all concrete specs matching self also match other, otherwise False.

        Args:
            other: spec to be satisfied
            deps: if True descend to dependencies, otherwise only check root node
        """
        other = self._autospec(other)

        if other.concrete:
            # The left-hand side must be the same singleton with identical hash. Notice that
            # package hashes can be different for otherwise indistinguishable concrete Spec
            # objects.
            return self.concrete and self.dag_hash() == other.dag_hash()

        # If the names are different, we need to consider virtuals
        if self.name != other.name and self.name and other.name:
            # A concrete provider can satisfy a virtual dependency.
            if not self.virtual and other.virtual:
                try:
                    # Here we might get an abstract spec
                    pkg_cls = spack.repo.path.get_pkg_class(self.fullname)
                    pkg = pkg_cls(self)
                except spack.repo.UnknownEntityError:
                    # If we can't get package info on this spec, don't treat
                    # it as a provider of this vdep.
                    return False

                if pkg.provides(other.name):
                    for provided, when_specs in pkg.provided.items():
                        if any(self.satisfies(when, deps=False) for when in when_specs):
                            if provided.intersects(other):
                                return True
                return False

        # namespaces either match, or other doesn't require one.
        if (
            other.namespace is not None
            and self.namespace is not None
            and self.namespace != other.namespace
        ):
            return False

        if not self.versions.satisfies(other.versions):
            return False

        if self.compiler and other.compiler:
            if not self.compiler.satisfies(other.compiler):
                return False
        elif other.compiler and not self.compiler:
            return False

        if not self.variants.satisfies(other.variants):
            return False

        if self.architecture and other.architecture:
            if not self.architecture.satisfies(other.architecture):
                return False
        elif other.architecture and not self.architecture:
            return False

        if not self.compiler_flags.satisfies(other.compiler_flags):
            return False

        # If we need to descend into dependencies, do it, otherwise we're done.
        if not deps:
            return True

        # If there are no constraints to satisfy, we're done.
        if not other._dependencies:
            return True

        # If we have no dependencies, we can't satisfy any constraints.
        if not self._dependencies:
            return False

        # If we arrived here, then rhs is abstract. At the moment we don't care about the edge
        # structure of an abstract DAG - hence the deps=False parameter.
        return all(
            any(lhs.satisfies(rhs, deps=False) for lhs in self.traverse(root=False))
            for rhs in other.traverse(root=False)
        )

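Read set-theoretically, the rewritten pair of methods obeys a simple contract; here each spec is replaced by the set of concrete specs it could match (the strings are illustrative, nothing more):

# satisfies is subset inclusion (one-way); intersects is non-empty overlap
# (commutative).
def satisfies(a, b):
    return a <= b

def intersects(a, b):
    return bool(a & b)

zlib = {"zlib@1.2.3", "zlib@1.2.13"}
zlib_123 = {"zlib@1.2.3"}
assert satisfies(zlib_123, zlib) and not satisfies(zlib, zlib_123)
assert intersects(zlib, zlib_123) and intersects(zlib_123, zlib)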
    def virtual_dependencies(self):
        """Return list of any virtual deps in this spec."""
        return [spec for spec in self.traverse() if spec.virtual]

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import functools
import warnings

import archspec.cpu

@@ -33,14 +32,6 @@ def _impl(self, other):
    return _impl


#: Translation table from archspec deprecated names
_DEPRECATED_ARCHSPEC_NAMES = {
    "graviton": "cortex_a72",
    "graviton2": "neoverse_n1",
    "graviton3": "neoverse_v1",
}


class Target(object):
    def __init__(self, name, module_name=None):
        """Target models microarchitectures and their compatibility.
@@ -52,10 +43,6 @@ def __init__(self, name, module_name=None):
                like Cray (e.g. craype-compiler)
        """
        if not isinstance(name, archspec.cpu.Microarchitecture):
            if name in _DEPRECATED_ARCHSPEC_NAMES:
                msg = "'target={}' is deprecated, use 'target={}' instead"
                name, old_name = _DEPRECATED_ARCHSPEC_NAMES[name], name
                warnings.warn(msg.format(old_name, name))
            name = archspec.cpu.TARGETS.get(name, archspec.cpu.generic_microarchitecture(name))
        self.microarchitecture = name
        self.module_name = module_name

@@ -183,7 +183,7 @@ def test_optimization_flags_with_custom_versions(
def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constraint_tuple):
    architecture = spack.spec.ArchSpec(architecture_tuple)
    constraint = spack.spec.ArchSpec(constraint_tuple)
    assert not architecture.satisfies(constraint, strict=True)
    assert not architecture.satisfies(constraint)


@pytest.mark.parametrize(

@@ -2,11 +2,13 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import filecmp
import glob
import io
import os
import platform
import sys
import tarfile
import urllib.error
import urllib.request
import urllib.response
@@ -952,3 +954,81 @@ def fake_build_tarball(node, push_url, **kwargs):
    bindist.push([spec], push_url, include_root=root, include_dependencies=deps)

    assert packages_to_push == expected


def test_reproducible_tarball_is_reproducible(tmpdir):
    p = tmpdir.mkdir("prefix")
    p.mkdir("bin")
    p.mkdir(".spack")

    app = p.join("bin", "app")

    tarball_1 = str(tmpdir.join("prefix-1.tar.gz"))
    tarball_2 = str(tmpdir.join("prefix-2.tar.gz"))

    with open(app, "w") as f:
        f.write("hello world")

    buildinfo = {"metadata": "yes please"}

    # Create a tarball with a certain mtime of bin/app
    os.utime(app, times=(0, 0))
    bindist._do_create_tarball(tarball_1, binaries_dir=p, pkg_dir="pkg", buildinfo=buildinfo)

    # Do it another time with different mtime of bin/app
    os.utime(app, times=(10, 10))
    bindist._do_create_tarball(tarball_2, binaries_dir=p, pkg_dir="pkg", buildinfo=buildinfo)

    # They should be bitwise identical:
    assert filecmp.cmp(tarball_1, tarball_2, shallow=False)

    # Sanity check for contents:
    with tarfile.open(tarball_1, mode="r") as f:
        for m in f.getmembers():
            assert m.uid == m.gid == m.mtime == 0
            assert m.uname == m.gname == ""

        assert set(f.getnames()) == {
            "pkg",
            "pkg/bin",
            "pkg/bin/app",
            "pkg/.spack",
            "pkg/.spack/binary_distribution",
        }
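The assertions above pin down what a reproducible tarball needs: constant uid/gid/mtime and empty owner names on every member. A sketch of how that is commonly achieved with tarfile's add(filter=...) hook; _do_create_tarball presumably performs an equivalent reset internally, so this is illustration rather than Spack's actual implementation:

import gzip
import tarfile

def reset_metadata(info: tarfile.TarInfo) -> tarfile.TarInfo:
    # zero out everything that varies between machines and runs
    info.uid = info.gid = info.mtime = 0
    info.uname = info.gname = ""
    return info

def create_reproducible_tarball(path, source_dir, arcname):
    # the gzip header embeds its own timestamp, so pin it too or the
    # .tar.gz bytes will still differ from run to run
    with gzip.GzipFile(path, mode="wb", mtime=0) as gz:
        with tarfile.open(fileobj=gz, mode="w") as tar:
            tar.add(source_dir, arcname=arcname, filter=reset_metadata)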
def test_tarball_normalized_permissions(tmpdir):
|
||||
p = tmpdir.mkdir("prefix")
|
||||
p.mkdir("bin")
|
||||
p.mkdir("share")
|
||||
p.mkdir(".spack")
|
||||
|
||||
app = p.join("bin", "app")
|
||||
data = p.join("share", "file")
|
||||
tarball = str(tmpdir.join("prefix.tar.gz"))
|
||||
|
||||
# Everyone can write & execute. This should turn into 0o755 when the tarball is
|
||||
# extracted (on a different system).
|
||||
with open(app, "w", opener=lambda path, flags: os.open(path, flags, 0o777)) as f:
|
||||
f.write("hello world")
|
||||
|
||||
# User doesn't have execute permissions, but group/world have; this should also
|
||||
# turn into 0o644 (user read/write, group&world only read).
|
||||
with open(data, "w", opener=lambda path, flags: os.open(path, flags, 0o477)) as f:
|
||||
f.write("hello world")
|
||||
|
||||
bindist._do_create_tarball(tarball, binaries_dir=p, pkg_dir="pkg", buildinfo={})
|
||||
|
||||
with tarfile.open(tarball) as tar:
|
||||
path_to_member = {member.name: member for member in tar.getmembers()}
|
||||
|
||||
# directories should have 0o755
|
||||
assert path_to_member["pkg"].mode == 0o755
|
||||
assert path_to_member["pkg/bin"].mode == 0o755
|
||||
assert path_to_member["pkg/.spack"].mode == 0o755
|
||||
|
||||
# executable-by-user files should be 0o755
|
||||
assert path_to_member["pkg/bin/app"].mode == 0o755
|
||||
|
||||
# not-executable-by-user files should be 0o644
|
||||
assert path_to_member["pkg/share/file"].mode == 0o644
|
||||
|
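The second test encodes a simple permission policy: directories and user-executable files end up as 0o755, everything else as 0o644, whatever the on-disk bits were. The policy itself fits in a few lines (a sketch, not Spack's actual helper):

import stat

def normalized_mode(mode: int, is_dir: bool) -> int:
    # directories and anything the owner can execute become 0o755
    if is_dir or (mode & stat.S_IXUSR):
        return 0o755
    return 0o644

assert normalized_mode(0o777, is_dir=False) == 0o755  # world-writable app -> 0o755
assert normalized_mode(0o477, is_dir=False) == 0o644  # no user-exec bit -> 0o644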
@@ -127,13 +127,13 @@ def test_static_to_shared_library(build_environment):
        "linux": (
            "/bin/mycc -shared"
            " -Wl,--disable-new-dtags"
            " -Wl,-soname,{2} -Wl,--whole-archive {0}"
            " -Wl,-soname -Wl,{2} -Wl,--whole-archive {0}"
            " -Wl,--no-whole-archive -o {1}"
        ),
        "darwin": (
            "/bin/mycc -dynamiclib"
            " -Wl,--disable-new-dtags"
            " -install_name {1} -Wl,-force_load,{0} -o {1}"
            " -install_name {1} -Wl,-force_load -Wl,{0} -o {1}"
        ),
    }

@@ -268,16 +268,18 @@ def test_cmake_std_args(self, default_mock_concretization):
        s = default_mock_concretization("mpich")
        assert spack.build_systems.cmake.CMakeBuilder.std_args(s.package)

    def test_cmake_bad_generator(self, monkeypatch, default_mock_concretization):
    def test_cmake_bad_generator(self, default_mock_concretization):
        s = default_mock_concretization("cmake-client")
        monkeypatch.setattr(type(s.package), "generator", "Yellow Sticky Notes", raising=False)
        with pytest.raises(spack.package_base.InstallError):
            s.package.builder.std_cmake_args
            spack.build_systems.cmake.CMakeBuilder.std_args(
                s.package, generator="Yellow Sticky Notes"
            )

    def test_cmake_secondary_generator(self, default_mock_concretization):
        s = default_mock_concretization("cmake-client")
        s.package.generator = "CodeBlocks - Unix Makefiles"
        assert s.package.builder.std_cmake_args
        assert spack.build_systems.cmake.CMakeBuilder.std_args(
            s.package, generator="CodeBlocks - Unix Makefiles"
        )

    def test_define(self, default_mock_concretization):
        s = default_mock_concretization("cmake-client")

@@ -16,7 +16,7 @@
import spack.config
import spack.spec
from spack.paths import build_env_path
from spack.util.environment import set_env, system_dirs
from spack.util.environment import SYSTEM_DIRS, set_env
from spack.util.executable import Executable, ProcessError

#
@@ -160,7 +160,7 @@ def wrapper_environment(working_env):
        SPACK_DEBUG_LOG_ID="foo-hashabc",
        SPACK_COMPILER_SPEC="gcc@4.4.7",
        SPACK_SHORT_SPEC="foo@1.2 arch=linux-rhel6-x86_64 /hashabc",
        SPACK_SYSTEM_DIRS=":".join(system_dirs),
        SPACK_SYSTEM_DIRS=":".join(SYSTEM_DIRS),
        SPACK_CC_RPATH_ARG="-Wl,-rpath,",
        SPACK_CXX_RPATH_ARG="-Wl,-rpath,",
        SPACK_F77_RPATH_ARG="-Wl,-rpath,",
@@ -342,6 +342,16 @@ def test_fc_flags(wrapper_environment, wrapper_flags):
    )


def test_Wl_parsing(wrapper_environment):
    check_args(
        cc,
        ["-Wl,-rpath,/a,--enable-new-dtags,-rpath=/b,--rpath", "-Wl,/c"],
        [real_cc]
        + target_args
        + ["-Wl,--disable-new-dtags", "-Wl,-rpath,/a", "-Wl,-rpath,/b", "-Wl,-rpath,/c"],
    )


def test_dep_rpath(wrapper_environment):
    """Ensure RPATHs for root package are added."""
    check_args(cc, test_args, [real_cc] + target_args + common_compile_args)

@@ -408,19 +408,36 @@ def test_get_spec_filter_list(mutable_mock_env_path, config, mutable_mock_repo):

     touched = ["libdwarf"]

     # traversing both directions from libdwarf in the graphs depicted
     # above (and additionally including dependencies of dependents of
     # libdwarf) results in the following possibly affected env specs:
     # mpileaks, callpath, dyninst, libdwarf, libelf, and mpich.
     # Unaffected specs are hypre and it's dependencies.
+    # Make sure we return the correct set of possibly affected specs,
+    # given a dependent traversal depth and the fact that the touched
+    # package is libdwarf. Passing traversal depth of None or something
+    # equal to or larger than the greatest depth in the graph are
+    # equivalent and result in traversal of all specs from the touched
+    # package to the root. Passing negative traversal depth results in
+    # no spec traversals. Passing any other number yields differing
+    # numbers of possibly affected specs.

-    affected_specs = ci.get_spec_filter_list(e1, touched)
-    affected_pkg_names = set([s.name for s in affected_specs])
-    expected_affected_pkg_names = set(
-        ["mpileaks", "mpich", "callpath", "dyninst", "libdwarf", "libelf"]
-    )
+    full_set = set(["mpileaks", "mpich", "callpath", "dyninst", "libdwarf", "libelf"])
+    empty_set = set([])
+    depth_2_set = set(["mpich", "callpath", "dyninst", "libdwarf", "libelf"])
+    depth_1_set = set(["dyninst", "libdwarf", "libelf"])
+    depth_0_set = set(["libdwarf", "libelf"])

-    assert affected_pkg_names == expected_affected_pkg_names
+    expectations = {
+        None: full_set,
+        3: full_set,
+        100: full_set,
+        -1: empty_set,
+        0: depth_0_set,
+        1: depth_1_set,
+        2: depth_2_set,
+    }

+    for key, val in expectations.items():
+        affected_specs = ci.get_spec_filter_list(e1, touched, dependent_traverse_depth=key)
+        affected_pkg_names = set([s.name for s in affected_specs])
+        print(f"{key}: {affected_pkg_names}")
+        assert affected_pkg_names == val


 @pytest.mark.regression("29947")
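The expectations table above encodes a depth-bounded traversal of dependents: depth 0 keeps the touched package plus its dependencies, each additional level adds one ring of dependents (and their dependencies), a negative depth yields nothing, and None behaves like an unbounded depth. A minimal sketch of that idea over plain adjacency dicts; the graph shape matches the test's mock repo, but the helper and dicts below are hypothetical, not Spack's API:

def affected(touched, dependents, dependencies, depth=None):
    # BFS through `dependents` for up to `depth` levels, then close over
    # `dependencies` of everything reached. Hypothetical helper, not ci.py.
    if depth is not None and depth < 0:
        return set()
    ring, reached, level = set(touched), set(touched), 0
    while ring and (depth is None or level < depth):
        ring = {d for pkg in ring for d in dependents.get(pkg, ())} - reached
        reached |= ring
        level += 1
    stack = list(reached)
    while stack:  # dependency closure
        for dep in dependencies.get(stack.pop(), ()):
            if dep not in reached:
                reached.add(dep)
                stack.append(dep)
    return reached

dependents = {"libdwarf": {"dyninst"}, "dyninst": {"callpath"}, "callpath": {"mpileaks"}}
dependencies = {"mpileaks": {"callpath"}, "callpath": {"dyninst", "mpich"},
                "dyninst": {"libdwarf", "libelf"}, "libdwarf": {"libelf"}}
assert affected(["libdwarf"], dependents, dependencies, depth=0) == {"libdwarf", "libelf"}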
@@ -28,8 +28,8 @@
 import spack.util.spack_yaml as syaml
 import spack.util.url as url_util
 from spack.schema.buildcache_spec import schema as specfile_schema
+from spack.schema.ci import schema as ci_schema
 from spack.schema.database_index import schema as db_idx_schema
-from spack.schema.gitlab_ci import schema as gitlab_ci_schema
 from spack.spec import CompilerSpec, Spec
 from spack.util.pattern import Bunch
@@ -177,26 +177,29 @@ def test_ci_generate_with_env(
     - [$old-gcc-pkgs]
   mirrors:
     some-mirror: {0}
-  gitlab-ci:
+  ci:
     bootstrap:
       - name: bootstrap
         compiler-agnostic: true
-    mappings:
+    pipeline-gen:
+    - submapping:
       - match:
           - arch=test-debian6-core2
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
           image: donotcare
       - match:
           - arch=test-debian6-m1
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
           image: donotcare
-    service-job-attributes:
-      image: donotcare
-      tags: [donotcare]
+    - cleanup-job:
+        image: donotcare
+        tags: [donotcare]
+    - reindex-job:
+        script:: [hello, world]
   cdash:
     build-group: Not important
     url: https://my.fake.cdash
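The same mechanical translation repeats in most of the hunks below: gitlab-ci becomes ci, the mappings list moves under a pipeline-gen entry's submapping key, runner-attributes becomes build-job, and the old *-job-attributes keys become <name>-job entries inside pipeline-gen. A minimal before/after sketch of the pattern, where the package, tag, and image values are placeholders rather than values taken from this diff:

# before (old schema, as deleted above)
gitlab-ci:
  mappings:
    - match: [somepkg]
      runner-attributes:
        tags: [runner-tag]
  service-job-attributes:
    image: someimage

# after (new schema, as added above)
ci:
  pipeline-gen:
  - submapping:
    - match: [somepkg]
      build-job:
        tags: [runner-tag]
  - cleanup-job:
      image: someimage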
@@ -239,6 +242,10 @@ def test_ci_generate_with_env(


 def _validate_needs_graph(yaml_contents, needs_graph, artifacts):
+    """Validate the needs graph in the generate CI"""
+
+    # TODO: Fix the logic to catch errors where expected packages/needs are not
+    # found.
     for job_name, job_def in yaml_contents.items():
         for needs_def_name, needs_list in needs_graph.items():
             if job_name.startswith(needs_def_name):
@@ -269,27 +276,30 @@ def test_ci_generate_bootstrap_gcc(
 spack:
   definitions:
     - bootstrap:
-      - gcc@9.5
-      - gcc@9.0
+      - gcc@3.0
   specs:
-    - dyninst%gcc@9.5
+    - dyninst%gcc@3.0
   mirrors:
     some-mirror: https://my.fake.mirror
-  gitlab-ci:
+  ci:
     bootstrap:
       - name: bootstrap
         compiler-agnostic: true
-    mappings:
+    pipeline-gen:
+    - submapping:
       - match:
           - arch=test-debian6-x86_64
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
       - match:
           - arch=test-debian6-aarch64
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
+    - any-job:
+        tags:
+          - donotcare
 """
     )

@@ -326,26 +336,30 @@ def test_ci_generate_bootstrap_artifacts_buildcache(
 spack:
   definitions:
     - bootstrap:
-      - gcc@9.5
+      - gcc@3.0
   specs:
-    - dyninst%gcc@9.5
+    - dyninst%gcc@3.0
   mirrors:
     some-mirror: https://my.fake.mirror
-  gitlab-ci:
+  ci:
     bootstrap:
       - name: bootstrap
         compiler-agnostic: true
-    mappings:
+    pipeline-gen:
+    - submapping:
       - match:
           - arch=test-debian6-x86_64
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
       - match:
           - arch=test-debian6-aarch64
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
+    - any-job:
+        tags:
+          - donotcare
     enable-artifacts-buildcache: True
 """
     )
@@ -398,7 +412,7 @@ def test_ci_generate_with_env_missing_section(
 """
     )

-    expect_out = 'Error: Environment yaml does not have "gitlab-ci" section'
+    expect_out = 'Error: Environment yaml does not have "ci" section'

     with tmpdir.as_cwd():
         env_cmd("create", "test", "./spack.yaml")
@@ -427,12 +441,13 @@ def test_ci_generate_with_cdash_token(
     - archive-files
   mirrors:
     some-mirror: https://my.fake.mirror
-  gitlab-ci:
+  ci:
     enable-artifacts-buildcache: True
-    mappings:
+    pipeline-gen:
+    - submapping:
       - match:
           - archive-files
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
           image: donotcare
@@ -485,11 +500,12 @@ def test_ci_generate_with_custom_scripts(
     - archive-files
   mirrors:
     some-mirror: https://my.fake.mirror
-  gitlab-ci:
-    mappings:
+  ci:
+    pipeline-gen:
+    - submapping:
       - match:
           - archive-files
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
           variables:
@@ -576,17 +592,18 @@ def test_ci_generate_pkg_with_deps(
     - flatten-deps
   mirrors:
     some-mirror: https://my.fake.mirror
-  gitlab-ci:
+  ci:
     enable-artifacts-buildcache: True
-    mappings:
+    pipeline-gen:
+    - submapping:
       - match:
           - flatten-deps
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
       - match:
           - dependency-install
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
 """
@@ -642,22 +659,23 @@ def test_ci_generate_for_pr_pipeline(
     - flatten-deps
   mirrors:
     some-mirror: https://my.fake.mirror
-  gitlab-ci:
+  ci:
     enable-artifacts-buildcache: True
-    mappings:
+    pipeline-gen:
+    - submapping:
       - match:
           - flatten-deps
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
       - match:
           - dependency-install
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
-    service-job-attributes:
-      image: donotcare
-      tags: [donotcare]
+    - cleanup-job:
+        image: donotcare
+        tags: [donotcare]
     rebuild-index: False
 """
     )
@@ -703,12 +721,13 @@ def test_ci_generate_with_external_pkg(
     - externaltest
   mirrors:
     some-mirror: https://my.fake.mirror
-  gitlab-ci:
-    mappings:
+  ci:
+    pipeline-gen:
+    - submapping:
       - match:
           - archive-files
           - externaltest
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
           image: donotcare
@@ -744,7 +763,7 @@ def test_ci_rebuild_missing_config(tmpdir, working_env, mutable_mock_env_path):
     env_cmd("create", "test", "./spack.yaml")
     env_cmd("activate", "--without-view", "--sh", "test")
     out = ci_cmd("rebuild", fail_on_error=False)
-    assert "env containing gitlab-ci" in out
+    assert "env containing ci" in out

     env_cmd("deactivate")

@@ -785,17 +804,18 @@ def create_rebuild_env(tmpdir, pkg_name, broken_tests=False):
     - $packages
   mirrors:
     test-mirror: {1}
-  gitlab-ci:
+  ci:
     broken-specs-url: {2}
     broken-tests-packages: {3}
     temporary-storage-url-prefix: {4}
-    mappings:
-      - match:
-          - {0}
-        runner-attributes:
-          tags:
-            - donotcare
-          image: donotcare
+    pipeline-gen:
+    - submapping:
+      - match:
+          - {0}
+        build-job:
+          tags:
+            - donotcare
+          image: donotcare
   cdash:
     build-group: Not important
     url: https://my.fake.cdash
@@ -875,10 +895,9 @@ def activate_rebuild_env(tmpdir, pkg_name, rebuild_env):
 @pytest.mark.parametrize("broken_tests", [True, False])
 def test_ci_rebuild_mock_success(
     tmpdir,
-    config,
     working_env,
     mutable_mock_env_path,
-    install_mockery,
+    install_mockery_mutable_config,
     mock_gnupghome,
     mock_stage,
     mock_fetch,
@@ -914,7 +933,7 @@ def test_ci_rebuild(
     tmpdir,
     working_env,
     mutable_mock_env_path,
-    install_mockery,
+    install_mockery_mutable_config,
     mock_packages,
     monkeypatch,
     mock_gnupghome,
@@ -1014,12 +1033,13 @@ def test_ci_nothing_to_rebuild(
     - $packages
   mirrors:
     test-mirror: {0}
-  gitlab-ci:
+  ci:
     enable-artifacts-buildcache: True
-    mappings:
+    pipeline-gen:
+    - submapping:
       - match:
           - archive-files
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
           image: donotcare
@@ -1101,18 +1121,19 @@ def test_ci_generate_mirror_override(
     - $packages
   mirrors:
     test-mirror: {0}
-  gitlab-ci:
-    mappings:
+  ci:
+    pipeline-gen:
+    - submapping:
       - match:
           - patchelf
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
           image: donotcare
-    service-job-attributes:
-      tags:
-        - nonbuildtag
-      image: basicimage
+    - cleanup-job:
+        tags:
+          - nonbuildtag
+        image: basicimage
 """.format(
         mirror_url
     )
@@ -1183,19 +1204,24 @@ def test_push_mirror_contents(
     - $packages
   mirrors:
     test-mirror: {0}
-  gitlab-ci:
+  ci:
     enable-artifacts-buildcache: True
-    mappings:
+    pipeline-gen:
+    - submapping:
       - match:
           - patchelf
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
           image: donotcare
-    service-job-attributes:
-      tags:
-        - nonbuildtag
-      image: basicimage
+    - cleanup-job:
+        tags:
+          - nonbuildtag
+        image: basicimage
+    - any-job:
+        tags:
+          - nonbuildtag
+        image: basicimage
 """.format(
         mirror_url
     )
@@ -1345,56 +1371,58 @@ def test_ci_generate_override_runner_attrs(
     - a
   mirrors:
     some-mirror: https://my.fake.mirror
-  gitlab-ci:
-    tags:
-      - toplevel
-      - toplevel2
-    variables:
-      ONE: toplevelvarone
-      TWO: toplevelvartwo
-    before_script:
-      - pre step one
-      - pre step two
-    script:
-      - main step
-    after_script:
-      - post step one
-    match_behavior: {0}
-    mappings:
-      - match:
-          - flatten-deps
-        runner-attributes:
-          tags:
-            - specific-one
-          variables:
-            THREE: specificvarthree
-      - match:
-          - dependency-install
-      - match:
-          - a
-        remove-attributes:
-          tags:
-            - toplevel2
-        runner-attributes:
-          tags:
-            - specific-a
-          variables:
-            ONE: specificvarone
-            TWO: specificvartwo
-          before_script:
-            - custom pre step one
-          script:
-            - custom main step
-          after_script:
-            - custom post step one
-      - match:
-          - a
-        runner-attributes:
-          tags:
-            - specific-a-2
-    service-job-attributes:
-      image: donotcare
-      tags: [donotcare]
+  ci:
+    pipeline-gen:
+    - match_behavior: {0}
+      submapping:
+      - match:
+          - flatten-deps
+        build-job:
+          tags:
+            - specific-one
+          variables:
+            THREE: specificvarthree
+      - match:
+          - dependency-install
+      - match:
+          - a
+        build-job:
+          tags:
+            - specific-a-2
+      - match:
+          - a
+        build-job-remove:
+          tags:
+            - toplevel2
+        build-job:
+          tags:
+            - specific-a
+          variables:
+            ONE: specificvarone
+            TWO: specificvartwo
+          before_script::
+          - - custom pre step one
+          script::
+          - - custom main step
+          after_script::
+          - custom post step one
+    - build-job:
+        tags:
+          - toplevel
+          - toplevel2
+        variables:
+          ONE: toplevelvarone
+          TWO: toplevelvartwo
+        before_script:
+        - - pre step one
+          - pre step two
+        script::
+        - - main step
+        after_script:
+        - - post step one
+    - cleanup-job:
+        image: donotcare
+        tags: [donotcare]
 """.format(
         match_behavior
     )
@@ -1420,8 +1448,6 @@ def test_ci_generate_override_runner_attrs(
     assert global_vars["SPACK_CHECKOUT_VERSION"] == "12ad69eb1"

     for ci_key in yaml_contents.keys():
-        if "(specs) b" in ci_key:
-            assert False
         if "(specs) a" in ci_key:
             # Make sure a's attributes override variables, and all the
             # scripts. Also, make sure the 'toplevel' tag doesn't
@@ -1495,10 +1521,11 @@ def test_ci_generate_with_workarounds(
     - callpath%gcc@9.5
   mirrors:
     some-mirror: https://my.fake.mirror
-  gitlab-ci:
-    mappings:
+  ci:
+    pipeline-gen:
+    - submapping:
       - match: ['%gcc@9.5']
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
           image: donotcare
@@ -1550,11 +1577,12 @@ def test_ci_rebuild_index(
     - callpath
   mirrors:
     test-mirror: {0}
-  gitlab-ci:
-    mappings:
+  ci:
+    pipeline-gen:
+    - submapping:
       - match:
           - patchelf
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
           image: donotcare
@@ -1642,29 +1670,30 @@ def test_ci_generate_bootstrap_prune_dag(
     - b%gcc@12.2.0
   mirrors:
     atestm: {0}
-  gitlab-ci:
+  ci:
     bootstrap:
       - name: bootstrap
         compiler-agnostic: true
-    mappings:
+    pipeline-gen:
+    - submapping:
       - match:
           - arch=test-debian6-x86_64
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
       - match:
           - arch=test-debian6-core2
-        runner-attributes:
+        build-job:
           tags:
             - meh
       - match:
           - arch=test-debian6-aarch64
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
       - match:
           - arch=test-debian6-m1
-        runner-attributes:
+        build-job:
           tags:
             - meh
 """.format(
@@ -1743,18 +1772,22 @@ def test_ci_generate_prune_untouched(
     - callpath
   mirrors:
     some-mirror: {0}
-  gitlab-ci:
-    mappings:
-      - match:
-          - arch=test-debian6-core2
-        runner-attributes:
-          tags:
-            - donotcare
-          image: donotcare
+  ci:
+    pipeline-gen:
+    - build-job:
+        tags:
+          - donotcare
+        image: donotcare
 """.format(
         mirror_url
     )
     )

     # Dependency graph rooted at callpath
     # callpath -> dyninst -> libelf
     #                     -> libdwarf -> libelf
     #          -> mpich

     with tmpdir.as_cwd():
         env_cmd("create", "test", "./spack.yaml")
         outputfile = str(tmpdir.join(".gitlab-ci.yml"))
@@ -1765,19 +1798,93 @@ def fake_compute_affected(r1=None, r2=None):
     def fake_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
         return False

-    with ev.read("test"):
+    env_hashes = {}
+
+    with ev.read("test") as active_env:
         monkeypatch.setattr(ci, "compute_affected_packages", fake_compute_affected)
         monkeypatch.setattr(ci, "get_stack_changed", fake_stack_changed)

+        active_env.concretize()
+
+        for s in active_env.all_specs():
+            env_hashes[s.name] = s.dag_hash()
+
         ci_cmd("generate", "--output-file", outputfile)

         with open(outputfile) as f:
             contents = f.read()
             print(contents)
             yaml_contents = syaml.load(contents)

+        generated_hashes = []
+
         for ci_key in yaml_contents.keys():
             if "archive-files" in ci_key:
                 print("Error: archive-files should have been pruned")
                 assert False
+            if ci_key.startswith("(specs)"):
+                generated_hashes.append(
+                    yaml_contents[ci_key]["variables"]["SPACK_JOB_SPEC_DAG_HASH"]
+                )
+
+        assert env_hashes["archive-files"] not in generated_hashes
+        for spec_name in ["callpath", "dyninst", "mpich", "libdwarf", "libelf"]:
+            assert env_hashes[spec_name] in generated_hashes
+
+
+def test_ci_generate_prune_env_vars(
+    tmpdir, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment, monkeypatch
+):
+    """Make sure environment variables controlling untouched spec
+    pruning behave as expected."""
+    os.environ.update({"SPACK_PRUNE_UNTOUCHED": "TRUE"})  # enables pruning of untouched specs
+    filename = str(tmpdir.join("spack.yaml"))
+    with open(filename, "w") as f:
+        f.write(
+            """\
+spack:
+  specs:
+    - libelf
+  ci:
+    pipeline-gen:
+    - submapping:
+      - match:
+          - arch=test-debian6-core2
+        build-job:
+          tags:
+            - donotcare
+          image: donotcare
+"""
+        )
+
+    with tmpdir.as_cwd():
+        env_cmd("create", "test", "./spack.yaml")
+
+        def fake_compute_affected(r1=None, r2=None):
+            return ["libdwarf"]
+
+        def fake_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
+            return False
+
+        expected_depth_param = None
+
+        def check_get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
+            assert dependent_traverse_depth == expected_depth_param
+            return set()
+
+        monkeypatch.setattr(ci, "compute_affected_packages", fake_compute_affected)
+        monkeypatch.setattr(ci, "get_stack_changed", fake_stack_changed)
+        monkeypatch.setattr(ci, "get_spec_filter_list", check_get_spec_filter_list)
+
+        expectations = {"-1": -1, "0": 0, "True": None}
+
+        for key, val in expectations.items():
+            with ev.read("test"):
+                os.environ.update({"SPACK_PRUNE_UNTOUCHED_DEPENDENT_DEPTH": key})
+                expected_depth_param = val
+                # Leaving out the mirror in the spack.yaml above means the
+                # pipeline generation command will fail, pretty much immediately.
+                # But for this test, we only care how the environment variables
+                # for pruning are handled, the faster the better. So allow the
+                # spack command to fail.
+                ci_cmd("generate", fail_on_error=False)


 def test_ci_subcommands_without_mirror(
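The expectations mapping above ({"-1": -1, "0": 0, "True": None}) implies the environment variable is parsed roughly as "an integer when it looks like one, otherwise an unbounded dependent traversal". A plausible sketch of that parsing; the helper name is hypothetical and the real logic lives in the spack ci command:

import os

def prune_depth_from_env(var="SPACK_PRUNE_UNTOUCHED_DEPENDENT_DEPTH"):
    # "-1", "0", "2", ... become ints; a non-numeric truthy value such as
    # "True" maps to None, i.e. traverse dependents without a depth bound.
    # Hypothetical helper mirroring the expectations table above.
    raw = os.environ.get(var)
    if raw is None:
        return None
    try:
        return int(raw)
    except ValueError:
        return None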
@@ -1796,11 +1903,12 @@ def test_ci_subcommands_without_mirror(
 spack:
   specs:
     - archive-files
-  gitlab-ci:
-    mappings:
+  ci:
+    pipeline-gen:
+    - submapping:
       - match:
           - archive-files
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
           image: donotcare
@@ -1829,12 +1937,13 @@ def test_ensure_only_one_temporary_storage():
     """Make sure 'gitlab-ci' section of env does not allow specification of
     both 'enable-artifacts-buildcache' and 'temporary-storage-url-prefix'."""
     gitlab_ci_template = """
-gitlab-ci:
+ci:
   {0}
-  mappings:
+  pipeline-gen:
+  - submapping:
     - match:
         - notcheckedhere
-      runner-attributes:
+      build-job:
         tags:
           - donotcare
 """
@@ -1850,21 +1959,21 @@ def test_ensure_only_one_temporary_storage():

     # User can specify "enable-artifacts-buildcache" (boolean)
     yaml_obj = syaml.load(gitlab_ci_template.format(enable_artifacts))
-    jsonschema.validate(yaml_obj, gitlab_ci_schema)
+    jsonschema.validate(yaml_obj, ci_schema)

     # User can also specify "temporary-storage-url-prefix" (string)
     yaml_obj = syaml.load(gitlab_ci_template.format(temp_storage))
-    jsonschema.validate(yaml_obj, gitlab_ci_schema)
+    jsonschema.validate(yaml_obj, ci_schema)

     # However, specifying both should fail to validate
     yaml_obj = syaml.load(gitlab_ci_template.format(specify_both))
     with pytest.raises(jsonschema.ValidationError):
-        jsonschema.validate(yaml_obj, gitlab_ci_schema)
+        jsonschema.validate(yaml_obj, ci_schema)

     # Specifying neither should be fine too, as neither of these properties
     # should be required
     yaml_obj = syaml.load(gitlab_ci_template.format(specify_neither))
-    jsonschema.validate(yaml_obj, gitlab_ci_schema)
+    jsonschema.validate(yaml_obj, ci_schema)


 def test_ci_generate_temp_storage_url(
@@ -1886,12 +1995,13 @@ def test_ci_generate_temp_storage_url(
     - archive-files
   mirrors:
     some-mirror: https://my.fake.mirror
-  gitlab-ci:
+  ci:
     temporary-storage-url-prefix: file:///work/temp/mirror
-    mappings:
+    pipeline-gen:
+    - submapping:
       - match:
           - archive-files
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
           image: donotcare
@@ -1957,15 +2067,16 @@ def test_ci_generate_read_broken_specs_url(
     - a
   mirrors:
     some-mirror: https://my.fake.mirror
-  gitlab-ci:
+  ci:
     broken-specs-url: "{0}"
-    mappings:
+    pipeline-gen:
+    - submapping:
       - match:
           - a
           - flatten-deps
           - b
           - dependency-install
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
           image: donotcare
@@ -2006,26 +2117,27 @@ def test_ci_generate_external_signing_job(
     - archive-files
   mirrors:
     some-mirror: https://my.fake.mirror
-  gitlab-ci:
+  ci:
     temporary-storage-url-prefix: file:///work/temp/mirror
-    mappings:
+    pipeline-gen:
+    - submapping:
       - match:
           - archive-files
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
           image: donotcare
-    signing-job-attributes:
-      tags:
-        - nonbuildtag
-        - secretrunner
-      image:
-        name: customdockerimage
-        entrypoint: []
-      variables:
-        IMPORTANT_INFO: avalue
-      script:
-        - echo hello
+    - signing-job:
+        tags:
+          - nonbuildtag
+          - secretrunner
+        image:
+          name: customdockerimage
+          entrypoint: []
+        variables:
+          IMPORTANT_INFO: avalue
+        script::
+        - echo hello
 """
     )

@@ -2068,11 +2180,12 @@ def test_ci_reproduce(
     - $packages
   mirrors:
     test-mirror: file:///some/fake/mirror
-  gitlab-ci:
-    mappings:
+  ci:
+    pipeline-gen:
+    - submapping:
       - match:
           - archive-files
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
           image: {0}
@@ -2149,7 +2262,9 @@ def fake_download_and_extract_artifacts(url, work_dir):
         working_dir.strpath,
         output=str,
     )
-    expect_out = "docker run --rm -v {0}:{0} -ti {1}".format(working_dir.strpath, image_name)
+    expect_out = "docker run --rm --name spack_reproducer -v {0}:{0}:Z -ti {1}".format(
+        os.path.realpath(working_dir.strpath), image_name
+    )

     assert expect_out in rep_out

@@ -122,19 +122,18 @@ def test_root_and_dep_match_returns_root(mock_packages, mutable_mock_env_path):
     assert env_spec2


-def test_concretizer_arguments(mutable_config, mock_packages):
+@pytest.mark.parametrize(
+    "arg,config", [("--reuse", True), ("--fresh", False), ("--reuse-deps", "dependencies")]
+)
+def test_concretizer_arguments(mutable_config, mock_packages, arg, config):
     """Ensure that ConfigSetAction is doing the right thing."""
     spec = spack.main.SpackCommand("spec")

     assert spack.config.get("concretizer:reuse", None) is None

-    spec("--reuse", "zlib")
+    spec(arg, "zlib")

-    assert spack.config.get("concretizer:reuse", None) is True
-
-    spec("--fresh", "zlib")
-
-    assert spack.config.get("concretizer:reuse", None) is False
+    assert spack.config.get("concretizer:reuse", None) == config


 def test_use_buildcache_type():
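The parametrization above relies on each CLI flag writing a fixed value into concretizer:reuse at argument-parsing time. A minimal sketch of such an argparse action follows; the class body, the CONFIG stand-in, and the path keyword are illustrative, not the actual ConfigSetAction implementation:

import argparse

CONFIG = {}  # stand-in for spack.config

class ConfigSetAction(argparse.Action):
    """Store a fixed const under a config path when the flag is seen."""

    def __init__(self, option_strings, dest, const=None, path=None, **kwargs):
        self.path = path
        super().__init__(option_strings, dest, nargs=0, const=const, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        CONFIG[self.path] = self.const
        setattr(namespace, self.dest, self.const)

parser = argparse.ArgumentParser()
parser.add_argument("--reuse", dest="reuse", action=ConfigSetAction,
                    const=True, path="concretizer:reuse")
parser.add_argument("--fresh", dest="reuse", action=ConfigSetAction,
                    const=False, path="concretizer:reuse")
parser.add_argument("--reuse-deps", dest="reuse", action=ConfigSetAction,
                    const="dependencies", path="concretizer:reuse")

parser.parse_args(["--reuse-deps"])
assert CONFIG["concretizer:reuse"] == "dependencies"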
@@ -82,8 +82,8 @@ def test_change_match_spec():

     change("--match-spec", "mpileaks@2.2", "mpileaks@2.3")

-    assert not any(x.satisfies("mpileaks@2.2") for x in e.user_specs)
-    assert any(x.satisfies("mpileaks@2.3") for x in e.user_specs)
+    assert not any(x.intersects("mpileaks@2.2") for x in e.user_specs)
+    assert any(x.intersects("mpileaks@2.3") for x in e.user_specs)


 def test_change_multiple_matches():
@@ -97,8 +97,8 @@ def test_change_multiple_matches():

     change("--match-spec", "mpileaks", "-a", "mpileaks%gcc")

-    assert all(x.satisfies("%gcc") for x in e.user_specs if x.name == "mpileaks")
-    assert any(x.satisfies("%clang") for x in e.user_specs if x.name == "libelf")
+    assert all(x.intersects("%gcc") for x in e.user_specs if x.name == "mpileaks")
+    assert any(x.intersects("%clang") for x in e.user_specs if x.name == "libelf")


 def test_env_add_virtual():
@@ -111,7 +111,7 @@ def test_env_add_virtual():
     hashes = e.concretized_order
     assert len(hashes) == 1
     spec = e.specs_by_hash[hashes[0]]
-    assert spec.satisfies("mpi")
+    assert spec.intersects("mpi")


 def test_env_add_nonexistant_fails():
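These renames track a split in Spec comparison semantics: intersects asks whether two constraints could both hold of some concrete spec (symmetric), while satisfies asks whether one spec is at least as constrained as another (one-directional). A small sketch of the distinction, assuming the post-split semantics:

from spack.spec import Spec

loose = Spec("mpileaks@2.1:2.3")
tight = Spec("mpileaks@2.2")

# Symmetric: the version ranges overlap, so each intersects the other.
assert tight.intersects(loose) and loose.intersects(tight)

# One-directional: @2.2 entails @2.1:2.3, but not the reverse.
assert tight.satisfies(loose)
assert not loose.satisfies(tight)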
@@ -687,7 +687,7 @@ def test_env_with_config():
     with e:
         e.concretize()

-    assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())
+    assert any(x.intersects("mpileaks@2.2") for x in e._get_environment_specs())


 def test_with_config_bad_include():
@@ -1630,9 +1630,9 @@ def test_stack_concretize_extraneous_deps(tmpdir, config, mock_packages):
             assert concrete.concrete
             assert not user.concrete
             if user.name == "libelf":
-                assert not concrete.satisfies("^mpi", strict=True)
+                assert not concrete.satisfies("^mpi")
             elif user.name == "mpileaks":
-                assert concrete.satisfies("^mpi", strict=True)
+                assert concrete.satisfies("^mpi")


 def test_stack_concretize_extraneous_variants(tmpdir, config, mock_packages):
@@ -16,8 +16,6 @@
 from spack.main import SpackCommand
 from spack.spec import Spec

-is_windows = sys.platform == "win32"
-

 @pytest.fixture
 def executables_found(monkeypatch):
@@ -39,7 +37,7 @@ def _win_exe_ext():


 def define_plat_exe(exe):
-    if is_windows:
+    if sys.platform == "win32":
         exe += ".bat"
     return exe
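The same refactor, replacing a module-level is_windows flag with inline sys.platform == "win32" checks, repeats across the remaining test files in this diff. A likely motivation, an assumption based on mypy's documented platform-check narrowing rather than anything stated in this diff, is that comparing sys.platform against a literal lets type checkers narrow platform-specific branches, which a boolean stored in a variable does not. A small sketch:

import sys

if sys.platform == "win32":
    # A type checker treats this branch as Windows-only and will check
    # Windows-specific imports and attributes here.
    import msvcrt  # noqa: F401
else:
    import fcntl  # noqa: F401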
@@ -276,7 +276,7 @@ def test_install_commit(mock_git_version_info, install_mockery, mock_packages, m
     assert filename in installed
     with open(spec.prefix.bin.join(filename), "r") as f:
         content = f.read().strip()
-        assert content == "[]"  # contents are weird for another test
+        assert content == "[0]"  # contents are weird for another test


 def test_install_overwrite_multiple(
@@ -793,7 +793,7 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock
     #   ^b
     # a
     #   ^b
-    e = ev.create("test")
+    e = ev.create("test", with_view=False)
     e.add("mpileaks")
     e.add("libelf@0.8.10")  # so env has both root and dep libelf specs
     e.add("a")
@@ -829,14 +829,11 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock
     # Assert using --no-add with a spec not in the env fails
     inst_out = install("--no-add", "boost", fail_on_error=False, output=str)

-    assert "You can add it to the environment with 'spack add " in inst_out
+    assert "You can add specs to the environment with 'spack add " in inst_out

-    # Without --add, ensure that install fails if the spec matches more
-    # than one root
-    with pytest.raises(ev.SpackEnvironmentError) as err:
-        inst_out = install("a", output=str)
-
-    assert "a matches multiple specs in the env" in str(err)
+    # Without --add, ensure that two packages "a" get installed
+    inst_out = install("a", output=str)
+    assert len([x for x in e.all_specs() if x.installed and x.name == "a"]) == 2

     # Install an unambiguous dependency spec (that already exists as a dep
     # in the environment) and make sure it gets installed (w/ deps),
@@ -1177,6 +1174,6 @@ def test_report_filename_for_cdash(install_mockery_mutable_config, mock_fetch):
     args = parser.parse_args(
         ["--cdash-upload-url", "https://blahblah/submit.php?project=debugging", "a"]
     )
-    _, specs = spack.cmd.install.specs_from_cli(args, {})
+    specs = spack.cmd.install.concrete_specs_from_cli(args, {})
     filename = spack.cmd.install.report_filename(args, specs)
     assert filename != "https://blahblah/submit.php?project=debugging"
@@ -7,7 +7,6 @@

 from spack.main import SpackCommand

-is_windows = sys.platform == "win32"
 resource = SpackCommand("resource")

 #: these are hashes used in mock packages
@@ -23,7 +22,7 @@
         "bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c",
         "7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730",
     ]
-    if not is_windows
+    if sys.platform != "win32"
     else [
         "abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234",
         "1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd",
@@ -68,7 +67,7 @@ def test_resource_list_only_hashes(mock_packages, capfd):
 def test_resource_show(mock_packages, capfd):
     test_hash = (
         "c45c1564f70def3fc1a6e22139f62cb21cd190cc3a7dbe6f4120fa59ce33dcb8"
-        if not is_windows
+        if sys.platform != "win32"
         else "3c5b65abcd6a3b2c714dbf7c31ff65fe3748a1adc371f030c283007ca5534f11"
     )
     with capfd.disabled():
@@ -14,8 +14,6 @@
 import spack.extensions
 import spack.main

-is_windows = sys.platform == "win32"
-

 class Extension:
     """Helper class to simplify the creation of simple command extension
@@ -274,7 +272,7 @@ def test_variable_in_extension_path(config, working_env):
     os.environ["_MY_VAR"] = os.path.join("my", "var")
     ext_paths = [os.path.join("~", "${_MY_VAR}", "spack-extension-1")]
     # Home env variable is USERPROFILE on Windows
-    home_env = "USERPROFILE" if is_windows else "HOME"
+    home_env = "USERPROFILE" if sys.platform == "win32" else "HOME"
     expected_ext_paths = [
         os.path.join(os.environ[home_env], os.environ["_MY_VAR"], "spack-extension-1")
     ]
@@ -58,6 +58,7 @@ def test_arm_version_detection(version_str, expected_version):
     [
         ("Cray C : Version 8.4.6 Mon Apr 15, 2019 12:13:39\n", "8.4.6"),
         ("Cray C++ : Version 8.4.6 Mon Apr 15, 2019 12:13:45\n", "8.4.6"),
+        ("Cray clang Version 8.4.6 Mon Apr 15, 2019 12:13:45\n", "8.4.6"),
         ("Cray Fortran : Version 8.4.6 Mon Apr 15, 2019 12:13:55\n", "8.4.6"),
     ],
 )
@@ -487,3 +488,27 @@ def _module(cmd, *args):
 def test_aocc_version_detection(version_str, expected_version):
     version = spack.compilers.aocc.Aocc.extract_version_from_output(version_str)
     assert version == expected_version
+
+
+@pytest.mark.regression("33901")
+@pytest.mark.parametrize(
+    "version_str",
+    [
+        (
+            "Apple clang version 11.0.0 (clang-1100.0.33.8)\n"
+            "Target: x86_64-apple-darwin18.7.0\n"
+            "Thread model: posix\n"
+            "InstalledDir: "
+            "/Applications/Xcode.app/Contents/Developer/Toolchains/"
+            "XcodeDefault.xctoolchain/usr/bin\n"
+        ),
+        (
+            "Apple LLVM version 7.0.2 (clang-700.1.81)\n"
+            "Target: x86_64-apple-darwin15.2.0\n"
+            "Thread model: posix\n"
+        ),
+    ],
+)
+def test_apple_clang_not_detected_as_cce(version_str):
+    version = spack.compilers.cce.Cce.extract_version_from_output(version_str)
+    assert version == "unknown"
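The regression test above guards against Apple clang banners being mistaken for Cray cce output, while the new "Cray clang" banner must still match. One way to get both behaviors is to anchor the version pattern on the vendor name; the regex below is a hypothetical illustration of that idea, not the pattern Spack actually uses:

import re

# Hypothetical pattern: requiring the "Cray" prefix means
# "Apple clang version 11.0.0" cannot match, while
# "Cray clang Version 8.4.6" and "Cray C : Version 8.4.6" do.
CCE_RE = re.compile(r"[Cc]ray (?:clang |C\+\+ |C |Fortran )?.*?[Vv]ersion\s+(\d+\.\d+\.\d+)")

def extract_version(output: str) -> str:
    match = CCE_RE.search(output)
    return match.group(1) if match else "unknown"

assert extract_version("Cray clang Version 8.4.6 Mon Apr 15, 2019") == "8.4.6"
assert extract_version("Apple clang version 11.0.0 (clang-1100.0.33.8)") == "unknown"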
@@ -2,6 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import copy
 import os
 import sys

@@ -25,8 +26,6 @@
 from spack.spec import Spec
 from spack.version import ver

-is_windows = sys.platform == "win32"
-

 def check_spec(abstract, concrete):
     if abstract.versions.concrete:
@@ -294,11 +293,11 @@ def test_concretize_with_provides_when(self):
         we ask for some advanced version.
         """
         repo = spack.repo.path
-        assert not any(s.satisfies("mpich2@:1.0") for s in repo.providers_for("mpi@2.1"))
-        assert not any(s.satisfies("mpich2@:1.1") for s in repo.providers_for("mpi@2.2"))
-        assert not any(s.satisfies("mpich@:1") for s in repo.providers_for("mpi@2"))
-        assert not any(s.satisfies("mpich@:1") for s in repo.providers_for("mpi@3"))
-        assert not any(s.satisfies("mpich2") for s in repo.providers_for("mpi@3"))
+        assert not any(s.intersects("mpich2@:1.0") for s in repo.providers_for("mpi@2.1"))
+        assert not any(s.intersects("mpich2@:1.1") for s in repo.providers_for("mpi@2.2"))
+        assert not any(s.intersects("mpich@:1") for s in repo.providers_for("mpi@2"))
+        assert not any(s.intersects("mpich@:1") for s in repo.providers_for("mpi@3"))
+        assert not any(s.intersects("mpich2") for s in repo.providers_for("mpi@3"))

     def test_provides_handles_multiple_providers_of_same_version(self):
         """ """
@@ -332,6 +331,24 @@ def test_compiler_flags_from_compiler_and_dependent(self):
         for spec in [client, cmake]:
             assert spec.compiler_flags["cflags"] == ["-O3", "-g"]

+    def test_compiler_flags_differ_identical_compilers(self):
+        # Correct arch to use test compiler that has flags
+        spec = Spec("a %clang@12.2.0 platform=test os=fe target=fe")
+
+        # Get the compiler that matches the spec (
+        compiler = spack.compilers.compiler_for_spec("clang@12.2.0", spec.architecture)
+        # Clear cache for compiler config since it has its own cache mechanism outside of config
+        spack.compilers._cache_config_file = []
+
+        # Configure spack to have two identical compilers with different flags
+        default_dict = spack.compilers._to_dict(compiler)
+        different_dict = copy.deepcopy(default_dict)
+        different_dict["compiler"]["flags"] = {"cflags": "-O2"}
+
+        with spack.config.override("compilers", [different_dict]):
+            spec.concretize()
+            assert spec.satisfies("cflags=-O2")
+
     def test_concretize_compiler_flag_propagate(self):
         spec = Spec("hypre cflags=='-g' ^openblas")
         spec.concretize()
@@ -1138,7 +1155,7 @@ def test_custom_compiler_version(self):
     def test_all_patches_applied(self):
         uuidpatch = (
             "a60a42b73e03f207433c5579de207c6ed61d58e4d12dd3b5142eb525728d89ea"
-            if not is_windows
+            if sys.platform != "win32"
             else "d0df7988457ec999c148a4a2af25ce831bfaad13954ba18a4446374cb0aef55e"
         )
         localpatch = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
@@ -1257,6 +1274,18 @@ def test_reuse_installed_packages_when_package_def_changes(
         # Structure and package hash will be different without reuse
         assert root.dag_hash() != new_root_without_reuse.dag_hash()

+    def test_reuse_with_flags(self, mutable_database, mutable_config):
+        if spack.config.get("config:concretizer") == "original":
+            pytest.xfail("Original concretizer does not reuse")
+
+        spack.config.set("concretizer:reuse", True)
+        spec = Spec("a cflags=-g cxxflags=-g").concretized()
+        spack.store.db.add(spec, None)
+
+        testspec = Spec("a cflags=-g")
+        testspec.concretize()
+        assert testspec == spec
+
     @pytest.mark.regression("20784")
     def test_concretization_of_test_dependencies(self):
         # With clingo we emit dependency_conditions regardless of the type
@@ -1445,7 +1474,7 @@ def test_concrete_specs_are_not_modified_on_reuse(
         with spack.config.override("concretizer:reuse", True):
             s = spack.spec.Spec(spec_str).concretized()
         assert s.installed is expect_installed
-        assert s.satisfies(spec_str, strict=True)
+        assert s.satisfies(spec_str)

     @pytest.mark.regression("26721,19736")
     def test_sticky_variant_in_package(self):
@@ -2047,3 +2076,85 @@ def test_external_python_extension_find_unified_python(self):
         abstract_specs = [spack.spec.Spec(s) for s in ["py-extension1", "python"]]
         specs = spack.concretize.concretize_specs_together(*abstract_specs)
         assert specs[0]["python"] == specs[1]["python"]
+
+    @pytest.mark.regression("36190")
+    @pytest.mark.parametrize(
+        "specs",
+        [
+            ["mpileaks ^callpath ^dyninst@8.1.1:8 ^mpich2@1.3:1"],
+            ["multivalue-variant ^a@2:2"],
+            ["v1-consumer ^conditional-provider@1:1 +disable-v1"],
+        ],
+    )
+    def test_result_specs_is_not_empty(self, specs):
+        """Check that the implementation of "result.specs" is correct in cases where we
+        know a concretization exists.
+        """
+        specs = [spack.spec.Spec(s) for s in specs]
+        solver = spack.solver.asp.Solver()
+        setup = spack.solver.asp.SpackSolverSetup()
+        result, _, _ = solver.driver.solve(setup, specs, reuse=[])
+
+        assert result.specs
+        assert not result.unsolved_specs
+
+    @pytest.mark.regression("36339")
+    def test_compiler_match_constraints_when_selected(self):
+        """Test that, when multiple compilers with the same name are in the configuration
+        we ensure that the selected one matches all the required constraints.
+        """
+        compiler_configuration = [
+            {
+                "compiler": {
+                    "spec": "gcc@11.1.0",
+                    "paths": {
+                        "cc": "/usr/bin/gcc",
+                        "cxx": "/usr/bin/g++",
+                        "f77": "/usr/bin/gfortran",
+                        "fc": "/usr/bin/gfortran",
+                    },
+                    "operating_system": "debian6",
+                    "target": "x86_64",
+                    "modules": [],
+                }
+            },
+            {
+                "compiler": {
+                    "spec": "gcc@12.1.0",
+                    "paths": {
+                        "cc": "/usr/bin/gcc",
+                        "cxx": "/usr/bin/g++",
+                        "f77": "/usr/bin/gfortran",
+                        "fc": "/usr/bin/gfortran",
+                    },
+                    "operating_system": "debian6",
+                    "target": "x86_64",
+                    "modules": [],
+                }
+            },
+        ]
+        spack.config.set("compilers", compiler_configuration)
+        s = spack.spec.Spec("a %gcc@:11").concretized()
+        assert s.compiler.version == ver("11.1.0"), s
+
+    @pytest.mark.regression("36339")
+    @pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows")
+    def test_compiler_with_custom_non_numeric_version(self, mock_executable):
+        """Test that, when a compiler has a completely made up version, we can use its
+        'real version' to detect targets and don't raise during concretization.
+        """
+        gcc_path = mock_executable("gcc", output="echo 9")
+        compiler_configuration = [
+            {
+                "compiler": {
+                    "spec": "gcc@foo",
+                    "paths": {"cc": gcc_path, "cxx": gcc_path, "f77": None, "fc": None},
+                    "operating_system": "debian6",
+                    "target": "x86_64",
+                    "modules": [],
+                }
+            }
+        ]
+        spack.config.set("compilers", compiler_configuration)
+        s = spack.spec.Spec("a %gcc@foo").concretized()
+        assert s.compiler.version == ver("foo")
@@ -107,12 +107,28 @@ def fake_installs(monkeypatch, tmpdir):
     )


+def test_one_package_multiple_reqs(concretize_scope, test_repo):
+    if spack.config.get("config:concretizer") == "original":
+        pytest.skip("Original concretizer does not support configuration requirements")
+
+    conf_str = """\
+packages:
+  y:
+    require:
+    - "@2.4"
+    - "~shared"
+"""
+    update_packages_config(conf_str)
+    y_spec = Spec("y").concretized()
+    assert y_spec.satisfies("@2.4~shared")
+
+
 def test_requirement_isnt_optional(concretize_scope, test_repo):
     """If a user spec requests something that directly conflicts
     with a requirement, make sure we get an error.
     """
     if spack.config.get("config:concretizer") == "original":
-        pytest.skip("Original concretizer does not support configuration" " requirements")
+        pytest.skip("Original concretizer does not support configuration requirements")

     conf_str = """\
 packages:
@@ -54,8 +54,6 @@
 from spack.util.pattern import Bunch
 from spack.util.web import FetchError

-is_windows = sys.platform == "win32"
-

 def ensure_configuration_fixture_run_before(request):
     """Ensure that fixture mutating the configuration run before the one where
@@ -159,7 +157,9 @@ def latest_commit():
         return git("rev-list", "-n1", "HEAD", output=str, error=str).strip()

     # Add two commits on main branch
-    write_file(filename, "[]")
+
+    # A commit without a previous version counts as "0"
+    write_file(filename, "[0]")
     git("add", filename)
     commit("first commit")
     commits.append(latest_commit())
@@ -621,7 +621,7 @@ def ensure_debug(monkeypatch):
     tty.set_debug(current_debug_level)


-@pytest.fixture(autouse=is_windows, scope="session")
+@pytest.fixture(autouse=sys.platform == "win32", scope="session")
 def platform_config():
     spack.config.add_default_platform_scope(spack.platforms.real_host().name)

@@ -633,7 +633,7 @@ def default_config():
     This ensures we can test the real default configuration without having
     tests fail when the user overrides the defaults that we test against."""
     defaults_path = os.path.join(spack.paths.etc_path, "defaults")
-    if is_windows:
+    if sys.platform == "win32":
         defaults_path = os.path.join(defaults_path, "windows")
     with spack.config.use_configuration(defaults_path) as defaults_config:
         yield defaults_config
@@ -690,7 +690,7 @@ def configuration_dir(tmpdir_factory, linux_os):
     tmpdir.ensure("user", dir=True)

     # Slightly modify config.yaml and compilers.yaml
-    if is_windows:
+    if sys.platform == "win32":
         locks = False
     else:
         locks = True
@@ -1675,11 +1675,11 @@ def mock_executable(tmpdir):
     """
     import jinja2

-    shebang = "#!/bin/sh\n" if not is_windows else "@ECHO OFF"
+    shebang = "#!/bin/sh\n" if sys.platform != "win32" else "@ECHO OFF"

     def _factory(name, output, subdir=("bin",)):
         f = tmpdir.ensure(*subdir, dir=True).join(name)
-        if is_windows:
+        if sys.platform == "win32":
             f += ".bat"
         t = jinja2.Template("{{ shebang }}{{ output }}\n")
         f.write(t.render(shebang=shebang, output=output))
@@ -33,8 +33,6 @@
 from spack.schema.database_index import schema
 from spack.util.executable import Executable

-is_windows = sys.platform == "win32"
-
 pytestmark = pytest.mark.db


@@ -451,7 +449,7 @@ def test_005_db_exists(database):
     lock_file = os.path.join(database.root, ".spack-db", "lock")
     assert os.path.exists(str(index_file))
     # Lockfiles not currently supported on Windows
-    if not is_windows:
+    if sys.platform != "win32":
         assert os.path.exists(str(lock_file))

     with open(index_file) as fd:
@@ -86,13 +86,13 @@ def test_env_change_spec_in_definition(tmpdir, mock_packages, config, mutable_mo
     e.concretize()
     e.write()

-    assert any(x.satisfies("mpileaks@2.1%gcc") for x in e.user_specs)
+    assert any(x.intersects("mpileaks@2.1%gcc") for x in e.user_specs)

     e.change_existing_spec(spack.spec.Spec("mpileaks@2.2"), list_name="desired_specs")
     e.write()

-    assert any(x.satisfies("mpileaks@2.2%gcc") for x in e.user_specs)
-    assert not any(x.satisfies("mpileaks@2.1%gcc") for x in e.user_specs)
+    assert any(x.intersects("mpileaks@2.2%gcc") for x in e.user_specs)
+    assert not any(x.intersects("mpileaks@2.1%gcc") for x in e.user_specs)


 def test_env_change_spec_in_matrix_raises_error(
@@ -230,16 +230,6 @@ def test_path_manipulation(env):
     assert os.environ["PATH_LIST_WITH_DUPLICATES"].count("/duplicate") == 1


-def test_extra_arguments(env):
-    """Tests that we can attach extra arguments to any command."""
-    env.set("A", "dummy value", who="Pkg1")
-    for x in env:
-        assert "who" in x.args
-
-    env.apply_modifications()
-    assert "dummy value" == os.environ["A"]
-
-
 def test_extend(env):
     """Tests that we can construct a list of environment modifications
     starting from another list.
@@ -489,7 +479,7 @@ def test_from_environment_diff(before, after, search_list):
             assert item in mod


-@pytest.mark.skipif(sys.platform == "win32", reason="LMod not supported on Windows")
+@pytest.mark.skipif(sys.platform == "win32", reason="Lmod not supported on Windows")
 @pytest.mark.regression("15775")
 def test_exclude_lmod_variables():
     # Construct the list of environment modifications
@@ -24,8 +24,6 @@
 import spack.store
 import spack.util.lock as lk

-is_windows = sys.platform == "win32"
-

 def _mock_repo(root, namespace):
     """Create an empty repository at the specified root
@@ -528,7 +526,7 @@ def _repoerr(repo, name):

     # The call to install_tree will raise the exception since not mocking
     # creation of dependency package files within *install* directories.
-    with pytest.raises(IOError, match=path if not is_windows else ""):
+    with pytest.raises(IOError, match=path if sys.platform != "win32" else ""):
         inst.dump_packages(spec, path)

     # Now try the error path, which requires the mock directory structure
@@ -879,7 +877,7 @@ def _chgrp(path, group, follow_symlinks=True):
     metadatadir = spack.store.layout.metadata_path(spec)
     # Regex matching with Windows style paths typically fails
     # so we skip the match check here
-    if is_windows:
+    if sys.platform == "win32":
         metadatadir = None
     # Should fail with a "not a directory" error
     with pytest.raises(OSError, match=metadatadir):
@@ -11,9 +11,8 @@
 import spack.paths
 from spack.compiler import _parse_non_system_link_dirs

-is_windows = sys.platform == "win32"
 drive = ""
-if is_windows:
+if sys.platform == "win32":
     match = re.search(r"[A-Za-z]:", spack.paths.test_path)
     if match:
         drive = match.group()
@@ -210,7 +209,7 @@ def test_obscure_parsing_rules():
     ]

     # TODO: add a comment explaining why this happens
-    if is_windows:
+    if sys.platform == "win32":
         paths.remove(os.path.join(root, "second", "path"))

     check_link_paths("obscure-parsing-rules.txt", paths)
@@ -13,8 +13,6 @@

 import spack.paths

-is_windows = sys.platform == "win32"
-

 @pytest.fixture()
 def library_list():
@@ -28,7 +26,7 @@ def library_list():
         "/dir3/libz.so",
         "libmpi.so.20.10.1",  # shared object libraries may be versioned
     ]
-    if not is_windows
+    if sys.platform != "win32"
     else [
         "/dir1/liblapack.lib",
         "/dir2/libpython3.6.dll",
@@ -59,10 +57,10 @@ def header_list():


 # TODO: Remove below when llnl.util.filesystem.find_libraries becomes spec aware
-plat_static_ext = "lib" if is_windows else "a"
+plat_static_ext = "lib" if sys.platform == "win32" else "a"


-plat_shared_ext = "dll" if is_windows else "so"
+plat_shared_ext = "dll" if sys.platform == "win32" else "so"


 plat_apple_shared_ext = "dylib"
@@ -78,7 +76,8 @@ def test_joined_and_str(self, library_list):
         expected = " ".join(
             [
                 "/dir1/liblapack.%s" % plat_static_ext,
-                "/dir2/libpython3.6.%s" % (plat_apple_shared_ext if not is_windows else "dll"),
+                "/dir2/libpython3.6.%s"
+                % (plat_apple_shared_ext if sys.platform != "win32" else "dll"),
                 "/dir1/libblas.%s" % plat_static_ext,
                 "/dir3/libz.%s" % plat_shared_ext,
                 "libmpi.%s.20.10.1" % plat_shared_ext,
@@ -93,7 +92,8 @@ def test_joined_and_str(self, library_list):
         expected = ";".join(
             [
                 "/dir1/liblapack.%s" % plat_static_ext,
-                "/dir2/libpython3.6.%s" % (plat_apple_shared_ext if not is_windows else "dll"),
+                "/dir2/libpython3.6.%s"
+                % (plat_apple_shared_ext if sys.platform != "win32" else "dll"),
                 "/dir1/libblas.%s" % plat_static_ext,
                 "/dir3/libz.%s" % plat_shared_ext,
                 "libmpi.%s.20.10.1" % plat_shared_ext,
@@ -62,8 +62,7 @@
 import llnl.util.multiproc as mp
 from llnl.util.filesystem import getuid, touch

-is_windows = sys.platform == "win32"
-if not is_windows:
+if sys.platform != "win32":
     import fcntl

 pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@@ -127,7 +126,7 @@ def make_readable(*paths):
     # stat.S_IREAD constants or a corresponding integer value). All other
     # bits are ignored."
     for path in paths:
-        if not is_windows:
+        if sys.platform != "win32":
             mode = 0o555 if os.path.isdir(path) else 0o444
         else:
             mode = stat.S_IREAD
@@ -136,7 +135,7 @@ def make_readable(*paths):

 def make_writable(*paths):
     for path in paths:
-        if not is_windows:
+        if sys.platform != "win32":
             mode = 0o755 if os.path.isdir(path) else 0o744
         else:
             mode = stat.S_IWRITE
@@ -616,7 +615,7 @@ def test_read_lock_read_only_dir_writable_lockfile(lock_dir, lock_path):
         pass


-@pytest.mark.skipif(False if is_windows else getuid() == 0, reason="user is root")
+@pytest.mark.skipif(False if sys.platform == "win32" else getuid() == 0, reason="user is root")
 def test_read_lock_no_lockfile(lock_dir, lock_path):
     """read-only directory, no lockfile (so can't create)."""
     with read_only(lock_dir):
@@ -55,7 +55,7 @@ def test_modules_written_with_proper_permissions(
     spec = spack.spec.Spec("mpileaks").concretized()

     # The code tested is common to all module types, but has to be tested from
-    # one. TCL picked at random
+    # one. Tcl picked at random
     generator = spack.modules.tcl.TclModulefileWriter(spec, "default")
     generator.write()
Some files were not shown because too many files have changed in this diff.