Compare commits
526 Commits
e4s-23.05
...
bugfix/env
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1c6bb8cfc3 | ||
|
|
9244ecacf0 | ||
|
|
1df4afb53f | ||
|
|
4991f0e484 | ||
|
|
09fd7d68eb | ||
|
|
2ace8a55c1 | ||
|
|
861acb9467 | ||
|
|
eea743de46 | ||
|
|
e2b6e5a7ec | ||
|
|
2f2dc3695c | ||
|
|
6eb5e57199 | ||
|
|
9a047eb95f | ||
|
|
ef42fd7a2f | ||
|
|
e642c2ea28 | ||
|
|
f27d012e0c | ||
|
|
c638311796 | ||
|
|
2a02bea405 | ||
|
|
219b42d991 | ||
|
|
c290ec1f62 | ||
|
|
e7ede86733 | ||
|
|
e3e7609af4 | ||
|
|
49d7ebec36 | ||
|
|
7c3d82d819 | ||
|
|
1c0fbec9ce | ||
|
|
ca4d60ae25 | ||
|
|
dc571e20d6 | ||
|
|
1485275d0c | ||
|
|
1afbf72037 | ||
|
|
407fd80f95 | ||
|
|
62525d9076 | ||
|
|
c2371263d1 | ||
|
|
5a870182ec | ||
|
|
e33ad83256 | ||
|
|
0352a1df5d | ||
|
|
ade44bce62 | ||
|
|
ddb29ebc34 | ||
|
|
19a62630e5 | ||
|
|
5626802aa0 | ||
|
|
f68063afbc | ||
|
|
8103d019d6 | ||
|
|
ce89cdd9d7 | ||
|
|
20d9b356f0 | ||
|
|
3401438a3a | ||
|
|
dcf1999d22 | ||
|
|
9e3c3ae298 | ||
|
|
40d6b84b4d | ||
|
|
2db09f27af | ||
|
|
6979d6a96f | ||
|
|
deffd2acc9 | ||
|
|
988f71f434 | ||
|
|
4fe76f973a | ||
|
|
8e4e6ad529 | ||
|
|
3586a2dbe3 | ||
|
|
4648939043 | ||
|
|
746eaaf01a | ||
|
|
bd2f78ae9a | ||
|
|
a4ebe01dec | ||
|
|
94e9e18558 | ||
|
|
d2e0ac4d1f | ||
|
|
36321fef1c | ||
|
|
e879877878 | ||
|
|
f0bce3eb25 | ||
|
|
316bfd8b7d | ||
|
|
92593fecd5 | ||
|
|
8db5fecdf5 | ||
|
|
eee696f320 | ||
|
|
8689cf392f | ||
|
|
15d4cce2eb | ||
|
|
45fbb82d1a | ||
|
|
2861c89b89 | ||
|
|
135bfeeb27 | ||
|
|
8fa9c66a7d | ||
|
|
5e6174cbe2 | ||
|
|
b4ad883b0d | ||
|
|
a681111a23 | ||
|
|
d2436afb66 | ||
|
|
e43444cbb6 | ||
|
|
8c0d947114 | ||
|
|
5ba4a2b83a | ||
|
|
da45073ef9 | ||
|
|
61e17fb36d | ||
|
|
9f13a90dd2 | ||
|
|
ef4b35ea63 | ||
|
|
66187c8a6e | ||
|
|
c8d95512fc | ||
|
|
c74fa648b9 | ||
|
|
4cc5e9cac6 | ||
|
|
41345d18f9 | ||
|
|
0dd1316b68 | ||
|
|
d8cc185e22 | ||
|
|
061051270c | ||
|
|
61445159db | ||
|
|
7fa3c7f0fa | ||
|
|
9c0fe30f42 | ||
|
|
d00010819f | ||
|
|
248b05b32a | ||
|
|
8232e934e9 | ||
|
|
9d005839af | ||
|
|
a7e5c73608 | ||
|
|
7896625919 | ||
|
|
fb43cb8166 | ||
|
|
28f68e5d11 | ||
|
|
1199eeed0b | ||
|
|
8ffeb4900b | ||
|
|
456550da3f | ||
|
|
b2676fe2dd | ||
|
|
8561ec6249 | ||
|
|
5b775d82ac | ||
|
|
b43088cc16 | ||
|
|
237eab136a | ||
|
|
ffffa2794b | ||
|
|
433b44403f | ||
|
|
fa2e1c0653 | ||
|
|
00257f6824 | ||
|
|
3b8366f3d3 | ||
|
|
a73f511404 | ||
|
|
c823e01baf | ||
|
|
4188080899 | ||
|
|
ef6ea2c93f | ||
|
|
3c672905d0 | ||
|
|
ee106c747f | ||
|
|
295726e6b8 | ||
|
|
2654d64a3c | ||
|
|
d91ec8500f | ||
|
|
c354cc51d0 | ||
|
|
d5747a61e7 | ||
|
|
e88c747abc | ||
|
|
cfe9e5bca4 | ||
|
|
48f7655a62 | ||
|
|
a1111a9858 | ||
|
|
b8b9a798bf | ||
|
|
7a1e94c775 | ||
|
|
8c4b2173d2 | ||
|
|
4c4cd7b3ea | ||
|
|
e92554414b | ||
|
|
d165e2c94b | ||
|
|
a97bd31afe | ||
|
|
d7719b26f9 | ||
|
|
855c0fd9e0 | ||
|
|
4156397027 | ||
|
|
b4bbe5e305 | ||
|
|
f5b595071e | ||
|
|
b6f2184cce | ||
|
|
9288067380 | ||
|
|
ddfc43be96 | ||
|
|
63cad5d338 | ||
|
|
436ecdfb19 | ||
|
|
06817600e4 | ||
|
|
4ae1a73d54 | ||
|
|
f29aab0d03 | ||
|
|
cea1b3123e | ||
|
|
b22ccf279d | ||
|
|
81e15ce36e | ||
|
|
8907e52933 | ||
|
|
80cefedac5 | ||
|
|
b85a66f77a | ||
|
|
a0ba3d890a | ||
|
|
315873cbd3 | ||
|
|
e05095af90 | ||
|
|
e0d6a73f96 | ||
|
|
6ebfb41ad9 | ||
|
|
d0aa01c807 | ||
|
|
1265c7df47 | ||
|
|
91e3f14959 | ||
|
|
5f03eb650d | ||
|
|
e0e6133444 | ||
|
|
ee68baf254 | ||
|
|
785c1a2070 | ||
|
|
79656655ba | ||
|
|
74921788a8 | ||
|
|
b313b28e64 | ||
|
|
5f1bc15e80 | ||
|
|
fa9fb60df3 | ||
|
|
e759e6c410 | ||
|
|
f41446258a | ||
|
|
268649654d | ||
|
|
12e249f64e | ||
|
|
c34cd76f2a | ||
|
|
815b210fc8 | ||
|
|
e5d5efb4c1 | ||
|
|
0aa4b4d990 | ||
|
|
01c1d334ae | ||
|
|
717fc11a46 | ||
|
|
d21c49e329 | ||
|
|
6937d9dddc | ||
|
|
4c2531d5fb | ||
|
|
62fd890c52 | ||
|
|
4772fd7723 | ||
|
|
7c11faceb0 | ||
|
|
053550e28a | ||
|
|
3ed7258447 | ||
|
|
a5cf5baa9e | ||
|
|
ec8039cc74 | ||
|
|
9bfa840c27 | ||
|
|
9865f42335 | ||
|
|
dba2829871 | ||
|
|
8c0e1fbed9 | ||
|
|
187488b75b | ||
|
|
2aa35fef3e | ||
|
|
d373fc36ae | ||
|
|
e483762015 | ||
|
|
5840a00000 | ||
|
|
110f836927 | ||
|
|
d6765f66ae | ||
|
|
19dac780e8 | ||
|
|
b82b549c59 | ||
|
|
b376401ece | ||
|
|
7d956dbe9e | ||
|
|
6db1d84bb0 | ||
|
|
2094fa3056 | ||
|
|
3d255bc213 | ||
|
|
5538dda722 | ||
|
|
1c0d89bf25 | ||
|
|
4cc0199fbb | ||
|
|
edb8226fff | ||
|
|
ef972cf642 | ||
|
|
50c13541e4 | ||
|
|
fd5d7cea6e | ||
|
|
526314b275 | ||
|
|
7b37c30019 | ||
|
|
dc03c3ad9e | ||
|
|
61b485f75d | ||
|
|
e24151783f | ||
|
|
ed9714e5ae | ||
|
|
ea620a083c | ||
|
|
504a8be666 | ||
|
|
d460870c77 | ||
|
|
f0f77251b3 | ||
|
|
bdd454b70b | ||
|
|
aea6662774 | ||
|
|
fe6bcb36c7 | ||
|
|
2474a2efe1 | ||
|
|
4cfd49019c | ||
|
|
7beae8af30 | ||
|
|
22fc5d2039 | ||
|
|
b70fc461a4 | ||
|
|
e756436d7c | ||
|
|
8dd87e2572 | ||
|
|
853bf95bd2 | ||
|
|
1c80d07fd2 | ||
|
|
6fd8001604 | ||
|
|
c08f9fd6fc | ||
|
|
c3fb998414 | ||
|
|
3368a98210 | ||
|
|
606b7c7f16 | ||
|
|
2f4e66be09 | ||
|
|
9ce3e8707c | ||
|
|
d6a96745ee | ||
|
|
a0fcdd092b | ||
|
|
e17d09e607 | ||
|
|
847d67f223 | ||
|
|
7ae0e06a62 | ||
|
|
d3df97df8b | ||
|
|
7d5d075809 | ||
|
|
237a0d8999 | ||
|
|
6952ed9950 | ||
|
|
3e2d1bd413 | ||
|
|
9dfba4659e | ||
|
|
7fca252aa4 | ||
|
|
fa23a0228f | ||
|
|
ed76966a3a | ||
|
|
2015a51d1a | ||
|
|
34b8fe827e | ||
|
|
6f1ed9b2e4 | ||
|
|
dd00f50943 | ||
|
|
f0ec625321 | ||
|
|
d406c371a8 | ||
|
|
42d374a34d | ||
|
|
d90e4fcc3d | ||
|
|
a44fde9dc9 | ||
|
|
9ac8841dab | ||
|
|
a1f87638ec | ||
|
|
3b55e0a65d | ||
|
|
42667fe7fa | ||
|
|
cd27611d2f | ||
|
|
b111d2172e | ||
|
|
055263fa3c | ||
|
|
f34f207bdc | ||
|
|
0c9f0fd40d | ||
|
|
24d5b1e645 | ||
|
|
616f7bcaef | ||
|
|
dace0316a2 | ||
|
|
3bb86418b8 | ||
|
|
6f6489a2c7 | ||
|
|
543b697df1 | ||
|
|
042dc2e1d8 | ||
|
|
f745e49d9a | ||
|
|
eda21cdfba | ||
|
|
bc8b026072 | ||
|
|
0f84782fcc | ||
|
|
43b86ce282 | ||
|
|
d30698d9a8 | ||
|
|
8e9efa86c8 | ||
|
|
84faf5a6cf | ||
|
|
9428749a3c | ||
|
|
efdac68c28 | ||
|
|
5398c31e82 | ||
|
|
188168c476 | ||
|
|
4af84ac208 | ||
|
|
deb8b51098 | ||
|
|
0d582b2ea9 | ||
|
|
f88b01c34b | ||
|
|
0533c6a1b8 | ||
|
|
f73d5c2b0e | ||
|
|
567d0ee455 | ||
|
|
577df6f498 | ||
|
|
8790efbcfe | ||
|
|
212b1edb6b | ||
|
|
d85a27f317 | ||
|
|
5622afbfd1 | ||
|
|
f345038317 | ||
|
|
e43d4cfee0 | ||
|
|
7070658e2a | ||
|
|
fc4b032fb4 | ||
|
|
8c97d8ad3f | ||
|
|
26107fe6b2 | ||
|
|
9278c0df21 | ||
|
|
37e95713f4 | ||
|
|
3ae8a3a517 | ||
|
|
031af84e90 | ||
|
|
7d4b65491d | ||
|
|
3038d1e7cd | ||
|
|
b2e6ef97ce | ||
|
|
e55236ce5b | ||
|
|
68dfd6ba6e | ||
|
|
38d2459f94 | ||
|
|
e309f367af | ||
|
|
3b59c95323 | ||
|
|
fddaeadff8 | ||
|
|
c85eaf9dc5 | ||
|
|
ddec7f8aec | ||
|
|
f057d7154b | ||
|
|
a102950d67 | ||
|
|
783be9b350 | ||
|
|
27c8135207 | ||
|
|
77ce4701b9 | ||
|
|
73ad3f729e | ||
|
|
1e7a64ad85 | ||
|
|
3a5864bcdb | ||
|
|
7e13a7dccb | ||
|
|
e3249fa155 | ||
|
|
0c20760576 | ||
|
|
7ee7995493 | ||
|
|
ba1fac1c31 | ||
|
|
b05f0ecb6f | ||
|
|
d5c66b75c3 | ||
|
|
98303d6956 | ||
|
|
4622d638a6 | ||
|
|
02023265fc | ||
|
|
8a075998f8 | ||
|
|
f2f48b1872 | ||
|
|
168d63c447 | ||
|
|
c25d4cbc1d | ||
|
|
ccb07538f7 | ||
|
|
1356b13b2f | ||
|
|
935f862863 | ||
|
|
9f6d9df302 | ||
|
|
65d33c02a1 | ||
|
|
40073e7b21 | ||
|
|
752e02e2f2 | ||
|
|
d717b3a33f | ||
|
|
9817f24c9a | ||
|
|
1f7c4b0557 | ||
|
|
6c42d2b7f7 | ||
|
|
8df036a5a5 | ||
|
|
582ebee74c | ||
|
|
1017b9ddde | ||
|
|
80ae73119d | ||
|
|
1d88f690a4 | ||
|
|
fbb271d804 | ||
|
|
d6aac873b7 | ||
|
|
ab3ffd9361 | ||
|
|
3b9454a5cc | ||
|
|
c8eb0f9361 | ||
|
|
fb0f14eb06 | ||
|
|
e489ee4e2e | ||
|
|
fcd49f2f08 | ||
|
|
b3268c2703 | ||
|
|
d1bfcfafe3 | ||
|
|
490c9f5e16 | ||
|
|
85628d1474 | ||
|
|
720c34d18d | ||
|
|
cd175377ca | ||
|
|
b91ec05e13 | ||
|
|
3bb15f420b | ||
|
|
124a81df5b | ||
|
|
d9472c083d | ||
|
|
ac2a5ef4dd | ||
|
|
ea210a6acf | ||
|
|
afb3bef7af | ||
|
|
b5b5881426 | ||
|
|
76fc7915a8 | ||
|
|
e7798b619b | ||
|
|
8ecef12a20 | ||
|
|
694292ebbf | ||
|
|
7f18f6f8a1 | ||
|
|
0b12a480eb | ||
|
|
2d91a79af3 | ||
|
|
72fcee7227 | ||
|
|
d147ef231f | ||
|
|
1c7af83d32 | ||
|
|
b982dfc071 | ||
|
|
c0da8a00fc | ||
|
|
3f18f689d8 | ||
|
|
9dc4553cf3 | ||
|
|
9a99c94b75 | ||
|
|
682f0b2a54 | ||
|
|
dbab0c1ff5 | ||
|
|
2bf95f5340 | ||
|
|
55561405b8 | ||
|
|
8eef458cea | ||
|
|
64eea9d996 | ||
|
|
60b4e2128b | ||
|
|
2f8cea2792 | ||
|
|
06f9bcf734 | ||
|
|
ee2725762f | ||
|
|
eace0a177c | ||
|
|
80c7d74707 | ||
|
|
a6f5bf821d | ||
|
|
b214406253 | ||
|
|
5b003d80e5 | ||
|
|
185b2d3ee7 | ||
|
|
71bb2a1899 | ||
|
|
785c31b730 | ||
|
|
175da4a88a | ||
|
|
73fc1ef11c | ||
|
|
2d77e44f6f | ||
|
|
033599c4cd | ||
|
|
8096ed4b22 | ||
|
|
b49bfe25af | ||
|
|
8b2f34d802 | ||
|
|
3daed0d6a7 | ||
|
|
d6c1f75e8d | ||
|
|
c80a4c1ddc | ||
|
|
466abcb62d | ||
|
|
69e99f0c16 | ||
|
|
bbee6dfc58 | ||
|
|
2d60cf120b | ||
|
|
db17fc2f33 | ||
|
|
c62080d498 | ||
|
|
f9bbe549fa | ||
|
|
55d7fec69c | ||
|
|
e938907150 | ||
|
|
0c40b86e96 | ||
|
|
3d4cf0d8eb | ||
|
|
966e19d278 | ||
|
|
8f930462bd | ||
|
|
bf4fccee15 | ||
|
|
784771a008 | ||
|
|
e4a9d9ae5b | ||
|
|
a6886983dc | ||
|
|
93a34a9635 | ||
|
|
91a54029f9 | ||
|
|
5400b49ed6 | ||
|
|
c17fc3c0c1 | ||
|
|
6f248836ea | ||
|
|
693c1821b0 | ||
|
|
62afe3bd5a | ||
|
|
53a756d045 | ||
|
|
321b687ae6 | ||
|
|
c8617f0574 | ||
|
|
7843e2ead0 | ||
|
|
dca3d071d7 | ||
|
|
436f077482 | ||
|
|
ab3f705019 | ||
|
|
d739989ec8 | ||
|
|
52ee1967d6 | ||
|
|
1af7284b5d | ||
|
|
e1bcefd805 | ||
|
|
2159b0183d | ||
|
|
078fd225a9 | ||
|
|
83974828c7 | ||
|
|
2412f74557 | ||
|
|
db06d3621d | ||
|
|
c25170d2f9 | ||
|
|
b3dfe13670 | ||
|
|
6358e84b48 | ||
|
|
8e634d8e49 | ||
|
|
1a21376515 | ||
|
|
bf45a2b6d3 | ||
|
|
475ce955e7 | ||
|
|
5e44289787 | ||
|
|
e66888511f | ||
|
|
e9e5beee1f | ||
|
|
ffd134c09d | ||
|
|
bfadd5c9a5 | ||
|
|
16e9279420 | ||
|
|
ac0903ef9f | ||
|
|
648839dffd | ||
|
|
489a604920 | ||
|
|
2ac3435810 | ||
|
|
69ea180d26 | ||
|
|
f52f217df0 | ||
|
|
df74aa5d7e | ||
|
|
41932c53ae | ||
|
|
4296db794f | ||
|
|
9ab9302409 | ||
|
|
0187376e54 | ||
|
|
7340d2cb83 | ||
|
|
641d4477d5 | ||
|
|
3ff2fb69af | ||
|
|
e3024b1bcb | ||
|
|
e733b87865 | ||
|
|
919985dc1b | ||
|
|
d746f7d427 | ||
|
|
b6deab515b | ||
|
|
848220c4ba | ||
|
|
98462bd27e | ||
|
|
2e2515266d | ||
|
|
776ab13276 | ||
|
|
c2ce9a6d93 | ||
|
|
4e3ed56dfa | ||
|
|
dcfcc03497 | ||
|
|
125c20bc06 | ||
|
|
f7696a4480 | ||
|
|
a5d7667cb6 | ||
|
|
d45818ccff | ||
|
|
bcb7af6eb3 | ||
|
|
f438fb6c79 | ||
|
|
371a8a361a | ||
|
|
86b9ce1c88 | ||
|
|
05232034f5 | ||
|
|
7a3da0f606 | ||
|
|
d96406a161 | ||
|
|
ffa5962356 | ||
|
|
67e74da3ba |
5
.github/dependabot.yml
vendored
5
.github/dependabot.yml
vendored
@@ -5,3 +5,8 @@ updates:
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
# Requirements to build documentation
|
||||
- package-ecosystem: "pip"
|
||||
directory: "/lib/spack/docs"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
|
||||
6
.github/workflows/audit.yaml
vendored
6
.github/workflows/audit.yaml
vendored
@@ -19,8 +19,8 @@ jobs:
|
||||
package-audits:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
|
||||
with:
|
||||
python-version: ${{inputs.python_version}}
|
||||
- name: Install Python packages
|
||||
@@ -38,7 +38,7 @@ jobs:
|
||||
run: |
|
||||
. share/spack/setup-env.sh
|
||||
$(which spack) audit packages
|
||||
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # @v2.1.0
|
||||
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
|
||||
if: ${{ inputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: unittests,linux,audits
|
||||
|
||||
22
.github/workflows/bootstrap.yml
vendored
22
.github/workflows/bootstrap.yml
vendored
@@ -24,7 +24,7 @@ jobs:
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison bison-devel libstdc++-static
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
@@ -62,7 +62,7 @@ jobs:
|
||||
make patch unzip xz-utils python3 python3-dev tree \
|
||||
cmake bison
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
@@ -99,7 +99,7 @@ jobs:
|
||||
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
@@ -133,7 +133,7 @@ jobs:
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup repo
|
||||
@@ -158,7 +158,7 @@ jobs:
|
||||
run: |
|
||||
brew install cmake bison@2.7 tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
@@ -179,7 +179,7 @@ jobs:
|
||||
run: |
|
||||
brew install tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
set -ex
|
||||
@@ -204,7 +204,7 @@ jobs:
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup repo
|
||||
@@ -247,7 +247,7 @@ jobs:
|
||||
bzip2 curl file g++ gcc patchelf gfortran git gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
@@ -283,7 +283,7 @@ jobs:
|
||||
make patch unzip xz-utils python3 python3-dev tree \
|
||||
gawk
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
@@ -316,7 +316,7 @@ jobs:
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
@@ -333,7 +333,7 @@ jobs:
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
|
||||
14
.github/workflows/build-containers.yml
vendored
14
.github/workflows/build-containers.yml
vendored
@@ -49,14 +49,14 @@ jobs:
|
||||
[almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
|
||||
[almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
|
||||
[rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
|
||||
[rockylinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'rockylinux:9'],
|
||||
[rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
|
||||
[fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
|
||||
[fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38']]
|
||||
name: Build ${{ matrix.dockerfile[0] }}
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
|
||||
- name: Set Container Tag Normal (Nightly)
|
||||
run: |
|
||||
@@ -92,13 +92,13 @@ jobs:
|
||||
path: dockerfiles
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1
|
||||
uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # @v1
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@4b4e9c3e2d4531116a6f8ba8e71fc6e2cb6e6c8c # @v1
|
||||
uses: docker/setup-buildx-action@ecf95283f03858871ff00b787d79c419715afc34 # @v1
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
|
||||
uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
@@ -106,13 +106,13 @@ jobs:
|
||||
|
||||
- name: Log in to DockerHub
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
|
||||
uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
|
||||
uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671 # @v2
|
||||
uses: docker/build-push-action@2eb1c1961a95fc15694676618e422e8ba1d63825 # @v2
|
||||
with:
|
||||
context: dockerfiles/${{ matrix.dockerfile[0] }}
|
||||
platforms: ${{ matrix.dockerfile[1] }}
|
||||
|
||||
2
.github/workflows/ci.yaml
vendored
2
.github/workflows/ci.yaml
vendored
@@ -35,7 +35,7 @@ jobs:
|
||||
core: ${{ steps.filter.outputs.core }}
|
||||
packages: ${{ steps.filter.outputs.packages }}
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
if: ${{ github.event_name == 'push' }}
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
31
.github/workflows/nightly-win-builds.yml
vendored
Normal file
31
.github/workflows/nightly-win-builds.yml
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
name: Windows Paraview Nightly
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 2 * * *' # Run at 2 am
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell:
|
||||
powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
|
||||
|
||||
|
||||
jobs:
|
||||
build-paraview-deps:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip six pywin32 setuptools coverage
|
||||
- name: Build Test
|
||||
run: |
|
||||
spack compiler find
|
||||
spack external find cmake ninja win-sdk win-wdk wgl msmpi
|
||||
spack -d install -y --cdash-upload-url https://cdash.spack.io/submit.php?project=Spack+on+Windows --cdash-track Nightly --only dependencies paraview
|
||||
exit 0
|
||||
26
.github/workflows/unit_tests.yaml
vendored
26
.github/workflows/unit_tests.yaml
vendored
@@ -47,10 +47,10 @@ jobs:
|
||||
on_develop: false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install System packages
|
||||
@@ -87,17 +87,17 @@ jobs:
|
||||
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
|
||||
run: |
|
||||
share/spack/qa/run-unit-tests
|
||||
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
|
||||
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
|
||||
with:
|
||||
flags: unittests,linux,${{ matrix.concretizer }}
|
||||
# Test shell integration
|
||||
shell:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- name: Install System packages
|
||||
@@ -118,7 +118,7 @@ jobs:
|
||||
COVERAGE: true
|
||||
run: |
|
||||
share/spack/qa/run-shell-tests
|
||||
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
|
||||
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
|
||||
with:
|
||||
flags: shelltests,linux
|
||||
|
||||
@@ -133,7 +133,7 @@ jobs:
|
||||
dnf install -y \
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch tcl unzip which xz
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
@@ -152,10 +152,10 @@ jobs:
|
||||
clingo-cffi:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- name: Install System packages
|
||||
@@ -176,7 +176,7 @@ jobs:
|
||||
SPACK_TEST_SOLVER: clingo
|
||||
run: |
|
||||
share/spack/qa/run-unit-tests
|
||||
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # @v2.1.0
|
||||
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
|
||||
with:
|
||||
flags: unittests,linux,clingo
|
||||
# Run unit tests on MacOS
|
||||
@@ -186,10 +186,10 @@ jobs:
|
||||
matrix:
|
||||
python-version: ["3.10"]
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install Python packages
|
||||
@@ -211,6 +211,6 @@ jobs:
|
||||
$(which spack) solve zlib
|
||||
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
|
||||
$(which spack) unit-test --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
|
||||
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
|
||||
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
|
||||
with:
|
||||
flags: unittests,macos
|
||||
|
||||
11
.github/workflows/valid-style.yml
vendored
11
.github/workflows/valid-style.yml
vendored
@@ -18,8 +18,8 @@ jobs:
|
||||
validate:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: 'pip'
|
||||
@@ -35,10 +35,10 @@ jobs:
|
||||
style:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: 'pip'
|
||||
@@ -68,7 +68,7 @@ jobs:
|
||||
dnf install -y \
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch tcl unzip which xz
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
@@ -81,6 +81,7 @@ jobs:
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack debug report
|
||||
spack -d bootstrap now --dev
|
||||
spack style -t black
|
||||
spack unit-test -V
|
||||
|
||||
16
.github/workflows/windows_python.yml
vendored
16
.github/workflows/windows_python.yml
vendored
@@ -15,10 +15,10 @@ jobs:
|
||||
unit-tests:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
@@ -33,16 +33,16 @@ jobs:
|
||||
./share/spack/qa/validate_last_exit.ps1
|
||||
coverage combine -a
|
||||
coverage xml
|
||||
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
|
||||
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
|
||||
with:
|
||||
flags: unittests,windows
|
||||
unit-tests-cmd:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
@@ -57,16 +57,16 @@ jobs:
|
||||
./share/spack/qa/validate_last_exit.ps1
|
||||
coverage combine -a
|
||||
coverage xml
|
||||
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
|
||||
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
|
||||
with:
|
||||
flags: unittests,windows
|
||||
build-abseil:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
|
||||
@@ -1,10 +1,16 @@
|
||||
version: 2
|
||||
|
||||
build:
|
||||
os: "ubuntu-22.04"
|
||||
apt_packages:
|
||||
- graphviz
|
||||
tools:
|
||||
python: "3.11"
|
||||
|
||||
sphinx:
|
||||
configuration: lib/spack/docs/conf.py
|
||||
fail_on_warning: true
|
||||
|
||||
python:
|
||||
version: 3.7
|
||||
install:
|
||||
- requirements: lib/spack/docs/requirements.txt
|
||||
|
||||
218
CHANGELOG.md
218
CHANGELOG.md
@@ -1,3 +1,221 @@
|
||||
# v0.20.0 (2023-05-21)
|
||||
|
||||
`v0.20.0` is a major feature release.
|
||||
|
||||
## Features in this release
|
||||
|
||||
1. **`requires()` directive and enhanced package requirements**
|
||||
|
||||
We've added some more enhancements to requirements in Spack (#36286).
|
||||
|
||||
There is a new `requires()` directive for packages. `requires()` is the opposite of
|
||||
`conflicts()`. You can use it to impose constraints on this package when certain
|
||||
conditions are met:
|
||||
|
||||
```python
|
||||
requires(
|
||||
"%apple-clang",
|
||||
when="platform=darwin",
|
||||
msg="This package builds only with clang on macOS"
|
||||
)
|
||||
```
|
||||
|
||||
More on this in [the docs](
|
||||
https://spack.rtfd.io/en/latest/packaging_guide.html#conflicts-and-requirements).
|
||||
|
||||
You can also now add a `when:` clause to `requires:` in your `packages.yaml`
|
||||
configuration or in an environment:
|
||||
|
||||
```yaml
|
||||
packages:
|
||||
openmpi:
|
||||
require:
|
||||
- any_of: ["%gcc"]
|
||||
when: "@:4.1.4"
|
||||
message: "Only OpenMPI 4.1.5 and up can build with fancy compilers"
|
||||
```
|
||||
|
||||
More details can be found [here](
|
||||
https://spack.readthedocs.io/en/latest/build_settings.html#package-requirements)
|
||||
|
||||
2. **Exact versions**
|
||||
|
||||
Spack did not previously have a way to distinguish a version if it was a prefix of
|
||||
some other version. For example, `@3.2` would match `3.2`, `3.2.1`, `3.2.2`, etc. You
|
||||
can now match *exactly* `3.2` with `@=3.2`. This is useful, for example, if you need
|
||||
to patch *only* the `3.2` version of a package. The new syntax is described in [the docs](
|
||||
https://spack.readthedocs.io/en/latest/basic_usage.html#version-specifier).
|
||||
|
||||
Generally, when writing packages, you should prefer to use ranges like `@3.2` over
|
||||
the specific versions, as this allows the concretizer more leeway when selecting
|
||||
versions of dependencies. More details and recommendations are in the [packaging guide](
|
||||
https://spack.readthedocs.io/en/latest/packaging_guide.html#ranges-versus-specific-versions).
|
||||
|
||||
See #36273 for full details on the version refactor.
|
||||
|
||||
3. **New testing interface**
|
||||
|
||||
Writing package tests is now much simpler with a new [test interface](
|
||||
https://spack.readthedocs.io/en/latest/packaging_guide.html#stand-alone-tests).
|
||||
|
||||
Writing a test is now as easy as adding a method that starts with `test_`:
|
||||
|
||||
```python
|
||||
class MyPackage(Package):
|
||||
...
|
||||
|
||||
def test_always_fails(self):
|
||||
"""use assert to always fail"""
|
||||
assert False
|
||||
|
||||
def test_example(self):
|
||||
"""run installed example"""
|
||||
example = which(self.prefix.bin.example)
|
||||
example()
|
||||
```
|
||||
|
||||
You can use Python's native `assert` statement to implement your checks -- no more
|
||||
need to fiddle with `run_test` or other test framework methods. Spack will
|
||||
   introspect the class and run `test_*` methods when you run `spack test`.
|
||||
|
||||
4. **More stable concretization**
|
||||
|
||||
* Now, `spack concretize` will *only* concretize the new portions of the environment
|
||||
and will not change existing parts of an environment unless you specify `--force`.
|
||||
This has always been true for `unify:false`, but not for `unify:true` and
|
||||
`unify:when_possible` environments. Now it is true for all of them (#37438, #37681).
|
||||
|
||||
* The concretizer has a new `--reuse-deps` argument that *only* reuses dependencies.
|
||||
That is, it will always treat the *roots* of your environment as it would with
|
||||
`--fresh`. This allows you to upgrade just the roots of your environment while
|
||||
keeping everything else stable (#30990).
|
||||
|
||||
5. **Weekly develop snapshot releases**
|
||||
|
||||
Since last year, we have maintained a buildcache of `develop` at
|
||||
https://binaries.spack.io/develop, but the cache can grow to contain so many builds
|
||||
as to be unwieldy. When we get a stable `develop` build, we snapshot the release and
|
||||
   add a corresponding tag to the Spack repository. So, you can use a stack from a specific
|
||||
day. There are now tags in the spack repository like:
|
||||
|
||||
* `develop-2023-05-14`
|
||||
* `develop-2023-05-18`
|
||||
|
||||
that correspond to build caches like:
|
||||
|
||||
* https://binaries.spack.io/develop-2023-05-14/e4s
|
||||
* https://binaries.spack.io/develop-2023-05-18/e4s
|
||||
|
||||
We plan to store these snapshot releases weekly.
|
||||
|
||||
6. **Specs in buildcaches can be referenced by hash.**
|
||||
|
||||
* Previously, you could run `spack buildcache list` and see the hashes in
|
||||
buildcaches, but referring to them by hash would fail.
|
||||
* You can now run commands like `spack spec` and `spack install` and refer to
|
||||
buildcache hashes directly, e.g. `spack install /abc123` (#35042)
|
||||
|
||||
7. **New package and buildcache index websites**
|
||||
|
||||
Our public websites for searching packages have been completely revamped and updated.
|
||||
You can check them out here:
|
||||
|
||||
* *Package Index*: https://packages.spack.io
|
||||
* *Buildcache Index*: https://cache.spack.io
|
||||
|
||||
Both are searchable and more interactive than before. Currently major releases are
|
||||
shown; UI for browsing `develop` snapshots is coming soon.
|
||||
|
||||
8. **Default CMake and Meson build types are now Release**
|
||||
|
||||
Spack has historically defaulted to building with optimization and debugging, but
|
||||
packages like `llvm` can be enormous with debug turned on. Our default build type for
|
||||
all Spack packages is now `Release` (#36679, #37436). This has a number of benefits:
|
||||
|
||||
* much smaller binaries;
|
||||
* higher default optimization level; and
|
||||
* defining `NDEBUG` disables assertions, which may lead to further speedups.
|
||||
|
||||
You can still get the old behavior back through requirements and package preferences.
|
||||
|
||||
## Other new commands and directives
|
||||
|
||||
* `spack checksum` can automatically add new versions to packages (#24532)
|
||||
* new command: `spack pkg grep` to easily search package files (#34388)
|
||||
* New `maintainers` directive (#35083)
|
||||
* Add `spack buildcache push` (alias to `buildcache create`) (#34861)
|
||||
* Allow using `-j` to control the parallelism of concretization (#37608)
|
||||
* Add `--exclude` option to 'spack external find' (#35013)
|
||||
|
||||
## Other new features of note
|
||||
|
||||
* editing: add higher-precedence `SPACK_EDITOR` environment variable
|
||||
* Many YAML formatting improvements from updating `ruamel.yaml` to the latest version
|
||||
supporting Python 3.6. (#31091, #24885, #37008).
|
||||
* Requirements and preferences should not define (non-git) versions (#37687, #37747)
|
||||
* Environments now store spack version/commit in `spack.lock` (#32801)
|
||||
* User can specify the name of the `packages` subdirectory in repositories (#36643)
|
||||
* Add container images supporting RHEL alternatives (#36713)
|
||||
* make version(...) kwargs explicit (#36998)
|
||||
|
||||
## Notable refactors
|
||||
|
||||
* buildcache create: reproducible tarballs (#35623)
|
||||
* Bootstrap most of Spack dependencies using environments (#34029)
|
||||
* Split `satisfies(..., strict=True/False)` into two functions (#35681)
|
||||
* spack install: simplify behavior when inside environments (#35206)
|
||||
|
||||
## Binary cache and stack updates
|
||||
|
||||
* Major simplification of CI boilerplate in stacks (#34272, #36045)
|
||||
* Many improvements to our CI pipeline's reliability
|
||||
|
||||
## Removals, Deprecations, and disablements
|
||||
* Module file generation is disabled by default; you'll need to enable it to use it (#37258)
|
||||
* Support for Python 2 was deprecated in `v0.19.0` and has been removed. `v0.20.0` only
|
||||
supports Python 3.6 and higher.
|
||||
* Deprecated target names are no longer recognized by Spack. Use generic names instead:
|
||||
* `graviton` is now `cortex_a72`
|
||||
* `graviton2` is now `neoverse_n1`
|
||||
* `graviton3` is now `neoverse_v1`
|
||||
* `blacklist` and `whitelist` in module configuration were deprecated in `v0.19.0` and are
|
||||
removed in this release. Use `exclude` and `include` instead.
|
||||
* The `ignore=` parameter of the `extends()` directive has been removed. It was not used by
|
||||
any builtin packages and is no longer needed to avoid conflicts in environment views (#35588).
|
||||
* Support for the old YAML buildcache format has been removed. It was deprecated in `v0.19.0` (#34347).
|
||||
* `spack find --bootstrap` has been removed. It was deprecated in `v0.19.0`. Use `spack
|
||||
--bootstrap find` instead (#33964).
|
||||
* `spack bootstrap trust` and `spack bootstrap untrust` are now removed, having been
|
||||
deprecated in `v0.19.0`. Use `spack bootstrap enable` and `spack bootstrap disable`.
|
||||
* The `--mirror-name`, `--mirror-url`, and `--directory` options to buildcache and
|
||||
mirror commands were deprecated in `v0.19.0` and have now been removed. They have been
|
||||
replaced by positional arguments (#37457).
|
||||
* Deprecate `env:` as top level environment key (#37424)
|
||||
* deprecate buildcache create --rel, buildcache install --allow-root (#37285)
|
||||
* Support for very old perl-like spec format strings (e.g., `$_$@$%@+$+$=`) has been
|
||||
  removed (#37425). This was deprecated in `v0.15` (#10556).
|
||||
|
||||
## Notable Bugfixes
|
||||
|
||||
* bugfix: don't fetch package metadata for unknown concrete specs (#36990)
|
||||
* Improve package source code context display on error (#37655)
|
||||
* Relax environment manifest filename requirements and lockfile identification criteria (#37413)
|
||||
* `installer.py`: drop build edges of installed packages by default (#36707)
|
||||
* Bugfix: package requirements with git commits (#35057, #36347)
|
||||
* Package requirements: allow single specs in requirement lists (#36258)
|
||||
* conditional variant values: allow boolean (#33939)
|
||||
* spack uninstall: follow run/link edges on --dependents (#34058)
|
||||
|
||||
## Spack community stats
|
||||
|
||||
* 7,179 total packages, 499 new since `v0.19.0`
|
||||
* 329 new Python packages
|
||||
* 31 new R packages
|
||||
* 336 people contributed to this release
|
||||
* 317 committers to packages
|
||||
* 62 committers to core
|
||||
|
||||
|
||||
# v0.19.1 (2023-02-07)
|
||||
|
||||
### Spack Bugfixes
|
||||
|
||||
16
lib/spack/docs/_pygments/style.py
Normal file
16
lib/spack/docs/_pygments/style.py
Normal file
@@ -0,0 +1,16 @@
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
# We use our own extension of the default style with a few modifications
|
||||
from pygments.styles.default import DefaultStyle
|
||||
from pygments.token import Generic
|
||||
|
||||
|
||||
class SpackStyle(DefaultStyle):
    """Spack's Sphinx syntax-highlighting style.

    Extends Pygments' default style with a light background and custom
    colors for console output and shell-prompt tokens.
    """

    background_color = "#f4f4f8"
    # Start from a copy of the default palette, then override two tokens.
    styles = dict(DefaultStyle.styles)
    styles.update(
        {
            Generic.Output: "#355",
            Generic.Prompt: "bold #346ec9",
        }
    )
|
||||
@@ -149,7 +149,6 @@ def setup(sphinx):
|
||||
# Get nice vector graphics
|
||||
graphviz_output_format = "svg"
|
||||
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ["_templates"]
|
||||
|
||||
@@ -233,30 +232,8 @@ def setup(sphinx):
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
# show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
# We use our own extension of the default style with a few modifications
|
||||
from pygments.style import Style
|
||||
from pygments.styles.default import DefaultStyle
|
||||
from pygments.token import Comment, Generic, Text
|
||||
|
||||
|
||||
class SpackStyle(DefaultStyle):
    """Custom Pygments style for the Spack docs.

    Identical to Pygments' default style except for the background color
    and the rendering of generic output and prompt tokens.
    """

    background_color = "#f4f4f8"
    # Merge the default palette with Spack-specific token overrides.
    styles = {
        **DefaultStyle.styles,
        Generic.Output: "#355",
        Generic.Prompt: "bold #346ec9",
    }
|
||||
|
||||
|
||||
import pkg_resources
|
||||
|
||||
dist = pkg_resources.Distribution(__file__)
|
||||
sys.path.append(".") # make 'conf' module findable
|
||||
ep = pkg_resources.EntryPoint.parse("spack = conf:SpackStyle", dist=dist)
|
||||
dist._ep_map = {"pygments.styles": {"plugin1": ep}}
|
||||
pkg_resources.working_set.add(dist)
|
||||
|
||||
pygments_style = "spack"
|
||||
sys.path.append("./_pygments")
|
||||
pygments_style = "style.SpackStyle"
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
# modindex_common_prefix = []
|
||||
@@ -341,16 +318,15 @@ class SpackStyle(DefaultStyle):
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = "Spackdoc"
|
||||
|
||||
|
||||
# -- Options for LaTeX output --------------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#'papersize': 'letterpaper',
|
||||
# 'papersize': 'letterpaper',
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#'pointsize': '10pt',
|
||||
# 'pointsize': '10pt',
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#'preamble': '',
|
||||
# 'preamble': '',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
|
||||
@@ -143,6 +143,26 @@ The OS that are currently supported are summarized in the table below:
|
||||
* - Amazon Linux 2
|
||||
- ``amazonlinux:2``
|
||||
- ``spack/amazon-linux``
|
||||
* - AlmaLinux 8
|
||||
- ``almalinux:8``
|
||||
- ``spack/almalinux8``
|
||||
* - AlmaLinux 9
|
||||
- ``almalinux:9``
|
||||
- ``spack/almalinux9``
|
||||
* - Rocky Linux 8
|
||||
- ``rockylinux:8``
|
||||
- ``spack/rockylinux8``
|
||||
* - Rocky Linux 9
|
||||
- ``rockylinux:9``
|
||||
- ``spack/rockylinux9``
|
||||
* - Fedora Linux 37
|
||||
- ``fedora:37``
|
||||
- ``spack/fedora37``
|
||||
* - Fedora Linux 38
|
||||
- ``fedora:38``
|
||||
- ``spack/fedora38``
|
||||
|
||||
|
||||
|
||||
All the images are tagged with the corresponding release of Spack:
|
||||
|
||||
@@ -616,7 +636,7 @@ to customize the generation of container recipes:
|
||||
- No
|
||||
* - ``os_packages:command``
|
||||
- Tool used to manage system packages
|
||||
- ``apt``, ``yum``, ``zypper``, ``apk``, ``yum_amazon``
|
||||
- ``apt``, ``yum``, ``dnf``, ``dnf_epel``, ``zypper``, ``apk``, ``yum_amazon``
|
||||
- Only with custom base images
|
||||
* - ``os_packages:update``
|
||||
- Whether or not to update the list of available packages
|
||||
|
||||
@@ -1132,11 +1132,11 @@ index once every package is pushed. Note how this target uses the generated
|
||||
example/push/%: example/install/%
|
||||
@mkdir -p $(dir $@)
|
||||
$(info About to push $(SPEC) to a buildcache)
|
||||
$(SPACK) -e . buildcache create --allow-root --only=package --directory $(BUILDCACHE_DIR) /$(HASH)
|
||||
$(SPACK) -e . buildcache push --allow-root --only=package $(BUILDCACHE_DIR) /$(HASH)
|
||||
@touch $@
|
||||
|
||||
push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
|
||||
$(info Updating the buildcache index)
|
||||
$(SPACK) -e . buildcache update-index --directory $(BUILDCACHE_DIR)
|
||||
$(SPACK) -e . buildcache update-index $(BUILDCACHE_DIR)
|
||||
$(info Done!)
|
||||
@touch $@
|
||||
|
||||
@@ -317,7 +317,7 @@ installed, but you know that new compilers have been added to your
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ module load gcc-4.9.0
|
||||
$ module load gcc/4.9.0
|
||||
$ spack compiler find
|
||||
==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
|
||||
gcc@4.9.0
|
||||
|
||||
@@ -76,6 +76,7 @@ or refer to the full manual below.
|
||||
chain
|
||||
extensions
|
||||
pipelines
|
||||
signing
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
@@ -35,27 +35,27 @@ showing lots of installed packages:
|
||||
$ module avail
|
||||
|
||||
--------------------------------------------------------------- ~/spack/share/spack/modules/linux-ubuntu14-x86_64 ---------------------------------------------------------------
|
||||
autoconf-2.69-gcc-4.8-qextxkq hwloc-1.11.6-gcc-6.3.0-akcisez m4-1.4.18-gcc-4.8-ev2znoc openblas-0.2.19-gcc-6.3.0-dhkmed6 py-setuptools-34.2.0-gcc-6.3.0-fadur4s
|
||||
automake-1.15-gcc-4.8-maqvukj isl-0.18-gcc-4.8-afi6taq m4-1.4.18-gcc-6.3.0-uppywnz openmpi-2.1.0-gcc-6.3.0-go2s4z5 py-six-1.10.0-gcc-6.3.0-p4dhkaw
|
||||
binutils-2.28-gcc-4.8-5s7c6rs libiconv-1.15-gcc-4.8-at46wg3 mawk-1.3.4-gcc-4.8-acjez57 openssl-1.0.2k-gcc-4.8-dkls5tk python-2.7.13-gcc-6.3.0-tyehea7
|
||||
bison-3.0.4-gcc-4.8-ek4luo5 libpciaccess-0.13.4-gcc-6.3.0-gmufnvh mawk-1.3.4-gcc-6.3.0-ostdoms openssl-1.0.2k-gcc-6.3.0-gxgr5or readline-7.0-gcc-4.8-xhufqhn
|
||||
bzip2-1.0.6-gcc-4.8-iffrxzn libsigsegv-2.11-gcc-4.8-pp2cvte mpc-1.0.3-gcc-4.8-g5mztc5 pcre-8.40-gcc-4.8-r5pbrxb readline-7.0-gcc-6.3.0-zzcyicg
|
||||
bzip2-1.0.6-gcc-6.3.0-bequudr libsigsegv-2.11-gcc-6.3.0-7enifnh mpfr-3.1.5-gcc-4.8-o7xm7az perl-5.24.1-gcc-4.8-dg5j65u sqlite-3.8.5-gcc-6.3.0-6zoruzj
|
||||
cmake-3.7.2-gcc-6.3.0-fowuuby libtool-2.4.6-gcc-4.8-7a523za mpich-3.2-gcc-6.3.0-dmvd3aw perl-5.24.1-gcc-6.3.0-6uzkpt6 tar-1.29-gcc-4.8-wse2ass
|
||||
curl-7.53.1-gcc-4.8-3fz46n6 libtool-2.4.6-gcc-6.3.0-n7zmbzt ncurses-6.0-gcc-4.8-dcpe7ia pkg-config-0.29.2-gcc-4.8-ib33t75 tcl-8.6.6-gcc-4.8-tfxzqbr
|
||||
expat-2.2.0-gcc-4.8-mrv6bd4 libxml2-2.9.4-gcc-4.8-ryzxnsu ncurses-6.0-gcc-6.3.0-ucbhcdy pkg-config-0.29.2-gcc-6.3.0-jpgubk3 util-macros-1.19.1-gcc-6.3.0-xorz2x2
|
||||
flex-2.6.3-gcc-4.8-yf345oo libxml2-2.9.4-gcc-6.3.0-rltzsdh netlib-lapack-3.6.1-gcc-6.3.0-js33dog py-appdirs-1.4.0-gcc-6.3.0-jxawmw7 xz-5.2.3-gcc-4.8-mew4log
|
||||
gcc-6.3.0-gcc-4.8-24puqve lmod-7.4.1-gcc-4.8-je4srhr netlib-scalapack-2.0.2-gcc-6.3.0-5aidk4l py-numpy-1.12.0-gcc-6.3.0-oemmoeu xz-5.2.3-gcc-6.3.0-3vqeuvb
|
||||
gettext-0.19.8.1-gcc-4.8-yymghlh lua-5.3.4-gcc-4.8-im75yaz netlib-scalapack-2.0.2-gcc-6.3.0-hjsemcn py-packaging-16.8-gcc-6.3.0-i2n3dtl zip-3.0-gcc-4.8-rwar22d
|
||||
gmp-6.1.2-gcc-4.8-5ub2wu5 lua-luafilesystem-1_6_3-gcc-4.8-wkey3nl netlib-scalapack-2.0.2-gcc-6.3.0-jva724b py-pyparsing-2.1.10-gcc-6.3.0-tbo6gmw zlib-1.2.11-gcc-4.8-pgxsxv7
|
||||
help2man-1.47.4-gcc-4.8-kcnqmau lua-luaposix-33.4.0-gcc-4.8-mdod2ry netlib-scalapack-2.0.2-gcc-6.3.0-rgqfr6d py-scipy-0.19.0-gcc-6.3.0-kr7nat4 zlib-1.2.11-gcc-6.3.0-7cqp6cj
|
||||
autoconf/2.69-gcc-4.8-qextxkq hwloc/1.11.6-gcc-6.3.0-akcisez m4/1.4.18-gcc-4.8-ev2znoc openblas/0.2.19-gcc-6.3.0-dhkmed6 py-setuptools/34.2.0-gcc-6.3.0-fadur4s
|
||||
automake/1.15-gcc-4.8-maqvukj isl/0.18-gcc-4.8-afi6taq m4/1.4.18-gcc-6.3.0-uppywnz openmpi/2.1.0-gcc-6.3.0-go2s4z5 py-six/1.10.0-gcc-6.3.0-p4dhkaw
|
||||
binutils/2.28-gcc-4.8-5s7c6rs libiconv/1.15-gcc-4.8-at46wg3 mawk/1.3.4-gcc-4.8-acjez57 openssl/1.0.2k-gcc-4.8-dkls5tk python/2.7.13-gcc-6.3.0-tyehea7
|
||||
bison/3.0.4-gcc-4.8-ek4luo5 libpciaccess/0.13.4-gcc-6.3.0-gmufnvh mawk/1.3.4-gcc-6.3.0-ostdoms openssl/1.0.2k-gcc-6.3.0-gxgr5or readline/7.0-gcc-4.8-xhufqhn
|
||||
bzip2/1.0.6-gcc-4.8-iffrxzn libsigsegv/2.11-gcc-4.8-pp2cvte mpc/1.0.3-gcc-4.8-g5mztc5 pcre/8.40-gcc-4.8-r5pbrxb readline/7.0-gcc-6.3.0-zzcyicg
|
||||
bzip2/1.0.6-gcc-6.3.0-bequudr libsigsegv/2.11-gcc-6.3.0-7enifnh mpfr/3.1.5-gcc-4.8-o7xm7az perl/5.24.1-gcc-4.8-dg5j65u sqlite/3.8.5-gcc-6.3.0-6zoruzj
|
||||
cmake/3.7.2-gcc-6.3.0-fowuuby libtool/2.4.6-gcc-4.8-7a523za mpich/3.2-gcc-6.3.0-dmvd3aw perl/5.24.1-gcc-6.3.0-6uzkpt6 tar/1.29-gcc-4.8-wse2ass
|
||||
curl/7.53.1-gcc-4.8-3fz46n6 libtool/2.4.6-gcc-6.3.0-n7zmbzt ncurses/6.0-gcc-4.8-dcpe7ia pkg-config/0.29.2-gcc-4.8-ib33t75 tcl/8.6.6-gcc-4.8-tfxzqbr
|
||||
expat/2.2.0-gcc-4.8-mrv6bd4 libxml2/2.9.4-gcc-4.8-ryzxnsu ncurses/6.0-gcc-6.3.0-ucbhcdy pkg-config/0.29.2-gcc-6.3.0-jpgubk3 util-macros/1.19.1-gcc-6.3.0-xorz2x2
|
||||
flex/2.6.3-gcc-4.8-yf345oo libxml2/2.9.4-gcc-6.3.0-rltzsdh netlib-lapack/3.6.1-gcc-6.3.0-js33dog py-appdirs/1.4.0-gcc-6.3.0-jxawmw7 xz/5.2.3-gcc-4.8-mew4log
|
||||
gcc/6.3.0-gcc-4.8-24puqve lmod/7.4.1-gcc-4.8-je4srhr netlib-scalapack/2.0.2-gcc-6.3.0-5aidk4l py-numpy/1.12.0-gcc-6.3.0-oemmoeu xz/5.2.3-gcc-6.3.0-3vqeuvb
|
||||
gettext/0.19.8.1-gcc-4.8-yymghlh lua/5.3.4-gcc-4.8-im75yaz netlib-scalapack/2.0.2-gcc-6.3.0-hjsemcn py-packaging/16.8-gcc-6.3.0-i2n3dtl zip/3.0-gcc-4.8-rwar22d
|
||||
gmp/6.1.2-gcc-4.8-5ub2wu5 lua-luafilesystem/1_6_3-gcc-4.8-wkey3nl netlib-scalapack/2.0.2-gcc-6.3.0-jva724b py-pyparsing/2.1.10-gcc-6.3.0-tbo6gmw zlib/1.2.11-gcc-4.8-pgxsxv7
|
||||
help2man/1.47.4-gcc-4.8-kcnqmau lua-luaposix/33.4.0-gcc-4.8-mdod2ry netlib-scalapack/2.0.2-gcc-6.3.0-rgqfr6d py-scipy/0.19.0-gcc-6.3.0-kr7nat4 zlib/1.2.11-gcc-6.3.0-7cqp6cj
|
||||
|
||||
The names should look familiar, as they resemble the output from ``spack find``.
|
||||
For example, you could type the following command to load the ``cmake`` module:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ module load cmake-3.7.2-gcc-6.3.0-fowuuby
|
||||
$ module load cmake/3.7.2-gcc-6.3.0-fowuuby
|
||||
|
||||
Neither of these is particularly pretty, easy to remember, or easy to
|
||||
type. Luckily, Spack offers many facilities for customizing the module
|
||||
@@ -779,35 +779,35 @@ cut-and-pasted into a shell script. For example:
|
||||
|
||||
$ spack module tcl loads --dependencies py-numpy git
|
||||
# bzip2@1.0.6%gcc@4.9.3=linux-x86_64
|
||||
module load bzip2-1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
|
||||
module load bzip2/1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
|
||||
# ncurses@6.0%gcc@4.9.3=linux-x86_64
|
||||
module load ncurses-6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
|
||||
module load ncurses/6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
|
||||
# zlib@1.2.8%gcc@4.9.3=linux-x86_64
|
||||
module load zlib-1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
|
||||
module load zlib/1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
|
||||
# sqlite@3.8.5%gcc@4.9.3=linux-x86_64
|
||||
module load sqlite-3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
|
||||
module load sqlite/3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
|
||||
# readline@6.3%gcc@4.9.3=linux-x86_64
|
||||
module load readline-6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
|
||||
module load readline/6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
|
||||
# python@3.5.1%gcc@4.9.3=linux-x86_64
|
||||
module load python-3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
|
||||
module load python/3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
|
||||
# py-setuptools@20.5%gcc@4.9.3=linux-x86_64
|
||||
module load py-setuptools-20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
|
||||
module load py-setuptools/20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
|
||||
# py-nose@1.3.7%gcc@4.9.3=linux-x86_64
|
||||
module load py-nose-1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
|
||||
module load py-nose/1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
|
||||
# openblas@0.2.17%gcc@4.9.3+shared=linux-x86_64
|
||||
module load openblas-0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
|
||||
module load openblas/0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
|
||||
# py-numpy@1.11.0%gcc@4.9.3+blas+lapack=linux-x86_64
|
||||
module load py-numpy-1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
|
||||
module load py-numpy/1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
|
||||
# curl@7.47.1%gcc@4.9.3=linux-x86_64
|
||||
module load curl-7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
|
||||
module load curl/7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
|
||||
# autoconf@2.69%gcc@4.9.3=linux-x86_64
|
||||
module load autoconf-2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
|
||||
module load autoconf/2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
|
||||
# cmake@3.5.0%gcc@4.9.3~doc+ncurses+openssl~qt=linux-x86_64
|
||||
module load cmake-3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
|
||||
module load cmake/3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
|
||||
# expat@2.1.0%gcc@4.9.3=linux-x86_64
|
||||
module load expat-2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
|
||||
module load expat/2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
|
||||
# git@2.8.0-rc2%gcc@4.9.3+curl+expat=linux-x86_64
|
||||
module load git-2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd
|
||||
module load git/2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd
|
||||
|
||||
The script may be further edited by removing unnecessary modules.
|
||||
|
||||
@@ -826,12 +826,12 @@ For example, consider the following on one system:
|
||||
.. code-block:: console
|
||||
|
||||
$ module avail
|
||||
linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y
|
||||
linux-SuSE11-x86_64/antlr/2.7.7-gcc-5.3.0-bdpl46y
|
||||
|
||||
$ spack module tcl loads antlr # WRONG!
|
||||
# antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
|
||||
module load antlr-2.7.7-gcc-5.3.0-bdpl46y
|
||||
module load antlr/2.7.7-gcc-5.3.0-bdpl46y
|
||||
|
||||
$ spack module tcl loads --prefix linux-SuSE11-x86_64/ antlr
|
||||
# antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y
|
||||
module load linux-SuSE11-x86_64/antlr/2.7.7-gcc-5.3.0-bdpl46y
|
||||
|
||||
@@ -3071,7 +3071,7 @@ follows:
|
||||
# The library provided by the bar virtual package
|
||||
@property
|
||||
def bar_libs(self):
|
||||
return find_libraries("libFooBar", root=sef.home, recursive=True)
|
||||
return find_libraries("libFooBar", root=self.home, recursive=True)
|
||||
|
||||
# The baz virtual package home
|
||||
@property
|
||||
|
||||
@@ -1,13 +1,8 @@
|
||||
# These dependencies should be installed using pip in order
|
||||
# to build the documentation.
|
||||
|
||||
sphinx>=3.4,!=4.1.2,!=5.1.0
|
||||
sphinxcontrib-programoutput
|
||||
sphinx-design
|
||||
sphinx-rtd-theme
|
||||
python-levenshtein
|
||||
# Restrict to docutils <0.17 to workaround a list rendering issue in sphinx.
|
||||
# https://stackoverflow.com/questions/67542699
|
||||
docutils <0.17
|
||||
pygments <2.13
|
||||
urllib3 <2
|
||||
sphinx==6.2.1
|
||||
sphinxcontrib-programoutput==0.17
|
||||
sphinx_design==0.4.1
|
||||
sphinx-rtd-theme==1.2.2
|
||||
python-levenshtein==0.21.1
|
||||
docutils==0.18.1
|
||||
pygments==2.15.1
|
||||
urllib3==2.0.3
|
||||
|
||||
484
lib/spack/docs/signing.rst
Normal file
484
lib/spack/docs/signing.rst
Normal file
@@ -0,0 +1,484 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _signing:
|
||||
|
||||
=====================
|
||||
Spack Package Signing
|
||||
=====================
|
||||
|
||||
The goal of package signing in Spack is to provide data integrity
|
||||
assurances around official packages produced by the automated Spack CI
|
||||
pipelines. These assurances directly address the security of Spack’s
|
||||
software supply chain by explaining why a security-conscious user can
|
||||
be reasonably justified in the belief that packages installed via Spack
|
||||
have an uninterrupted auditable trail back to change management
|
||||
decisions judged to be appropriate by the Spack maintainers. This is
|
||||
achieved through cryptographic signing of packages built by Spack CI
|
||||
pipelines based on code that has been transparently reviewed and
|
||||
approved on GitHub. This document describes the signing process for
|
||||
interested users.
|
||||
|
||||
.. _risks:
|
||||
|
||||
------------------------------
|
||||
Risks, Impact and Threat Model
|
||||
------------------------------
|
||||
|
||||
This document addresses the approach taken to safeguard Spack’s
|
||||
reputation with regard to the integrity of the package data produced by
|
||||
Spack’s CI pipelines. It does not address issues of data confidentiality
|
||||
(Spack is intended to be largely open source) or availability (efforts
|
||||
are described elsewhere). With that said the main reputational risk can
|
||||
be broadly categorized as a loss of faith in the data integrity due to a
|
||||
breach of the private key used to sign packages. Remediation of a
|
||||
private key breach would require republishing the public key with a
|
||||
revocation certificate, generating a new signing key, an assessment and
|
||||
potential rebuild/resigning of all packages since the key was breached,
|
||||
and finally direct intervention by every spack user to update their copy
|
||||
of Spack’s public keys used for local verification.
|
||||
|
||||
The primary threat model used in mitigating the risks of these stated
|
||||
impacts is one of individual error not malicious intent or insider
|
||||
threat. The primary objective is to avoid the above impacts by making a
|
||||
private key breach nearly impossible due to oversight or configuration
|
||||
error. Obvious and straightforward measures are taken to mitigate issues
|
||||
of malicious interference in data integrity and insider threats but
|
||||
these attack vectors are not systematically addressed. It should be hard
|
||||
to exfiltrate the private key intentionally, and almost impossible to
|
||||
leak the key by accident.
|
||||
|
||||
.. _overview:
|
||||
|
||||
-----------------
|
||||
Pipeline Overview
|
||||
-----------------
|
||||
|
||||
Spack pipelines build software through progressive stages where packages
|
||||
in later stages nominally depend on packages built in earlier stages.
|
||||
For both technical and design reasons these dependencies are not
|
||||
implemented through the default GitLab artifacts mechanism; instead
|
||||
built packages are uploaded to AWS S3 mirrors (buckets) where they are
|
||||
retrieved by subsequent stages in the pipeline. Two broad categories of
|
||||
pipelines exist: Pull Request (PR) pipelines and Develop/Release
|
||||
pipelines.
|
||||
|
||||
- PR pipelines are launched in response to pull requests made by
|
||||
trusted and untrusted users. Packages built on these pipelines upload
|
||||
code to quarantined AWS S3 locations which cache the built packages
|
||||
for the purposes of review and iteration on the changes proposed in
|
||||
the pull request. Packages built on PR pipelines can come from
|
||||
untrusted users so signing of these pipelines is not implemented.
|
||||
Jobs in these pipelines are executed via normal GitLab runners both
|
||||
within the AWS GitLab infrastructure and at affiliated institutions.
|
||||
- Develop and Release pipelines **sign** the packages they produce and carry
|
||||
strong integrity assurances that trace back to auditable change management
|
||||
decisions. These pipelines only run after members from a trusted group of
|
||||
reviewers verify that the proposed changes in a pull request are appropriate.
|
||||
Once the PR is merged, or a release is cut, a pipeline is run on protected
|
||||
GitLab runners which provide access to the required signing keys within the
|
||||
job. Intermediary keys are used to sign packages in each stage of the
|
||||
pipeline as they are built and a final job officially signs each package
|
||||
external to any specific packages’ build environment. An intermediate key
|
||||
exists in the AWS infrastructure and for each affiliated institution that
|
||||
maintains protected runners. The runners that execute these pipelines
|
||||
exclusively accept jobs from protected branches meaning the intermediate keys
|
||||
are never exposed to unreviewed code and the official keys are never exposed
|
||||
to any specific build environment.
|
||||
|
||||
.. _key_architecture:
|
||||
|
||||
----------------
|
||||
Key Architecture
|
||||
----------------
|
||||
|
||||
Spack’s CI process uses public-key infrastructure (PKI) based on GNU Privacy
|
||||
Guard (gpg) keypairs to sign public releases of spack package metadata, also
|
||||
called specs. Two classes of GPG keys are involved in the process to reduce the
|
||||
impact of an individual private key compromise, these key classes are the
|
||||
*Intermediate CI Key* and *Reputational Key*. Each of these keys has signing
|
||||
sub-keys that are used exclusively for signing packages. This can be confusing
|
||||
so for the purpose of this explanation we’ll refer to Root and Signing keys.
|
||||
Each key has a private and a public component as well as one or more identities
|
||||
and zero or more signatures.
|
||||
|
||||
-------------------
|
||||
Intermediate CI Key
|
||||
-------------------
|
||||
|
||||
The Intermediate key class is used to sign and verify packages between stages
|
||||
within a develop or release pipeline. An intermediate key exists for the AWS
|
||||
infrastructure as well as each affiliated institution that maintains protected
|
||||
runners. These intermediate keys are made available to the GitLab execution
|
||||
environment building the package so that the package’s dependencies may be
|
||||
verified by the Signing Intermediate CI Public Key and the final package may be
|
||||
signed by the Signing Intermediate CI Private Key.
|
||||
|
||||
|
||||
+---------------------------------------------------------------------------------------------------------+
|
||||
| **Intermediate CI Key (GPG)** |
|
||||
+==================================================+======================================================+
|
||||
| Root Intermediate CI Private Key (RSA 4096)# | Root Intermediate CI Public Key (RSA 4096) |
|
||||
+--------------------------------------------------+------------------------------------------------------+
|
||||
| Signing Intermediate CI Private Key (RSA 4096) | Signing Intermediate CI Public Key (RSA 4096) |
|
||||
+--------------------------------------------------+------------------------------------------------------+
|
||||
| Identity: “Intermediate CI Key <maintainers@spack.io>” |
|
||||
+---------------------------------------------------------------------------------------------------------+
|
||||
| Signatures: None |
|
||||
+---------------------------------------------------------------------------------------------------------+
|
||||
|
||||
|
||||
The *Root Intermediate CI Private Key* is stripped out of the GPG key and
|
||||
stored offline completely separate from Spack’s infrastructure. This allows the
|
||||
core development team to append revocation certificates to the GPG key and
|
||||
issue new sub-keys for use in the pipeline. It is our expectation that this
|
||||
will happen on a semi regular basis. A corollary of this is that *this key
|
||||
should not be used to verify package integrity outside the internal CI process.*
|
||||
|
||||
----------------
|
||||
Reputational Key
|
||||
----------------
|
||||
|
||||
The Reputational Key is the public facing key used to sign complete groups of
|
||||
development and release packages. Only one key pair exists in this class of
|
||||
keys. In contrast to the Intermediate CI Key the Reputational Key *should* be
|
||||
used to verify package integrity. At the end of develop and release pipeline a
|
||||
final pipeline job pulls down all signed package metadata built by the pipeline,
|
||||
verifies they were signed with an Intermediate CI Key, then strips the
|
||||
Intermediate CI Key signature from the package and re-signs them with the
|
||||
Signing Reputational Private Key. The officially signed packages are then
|
||||
uploaded back to the AWS S3 mirror. Please note that separating use of the
|
||||
reputational key into this final job is done to prevent leakage of the key in a
|
||||
spack package. Because the Signing Reputational Private Key is never exposed to
|
||||
a build job it cannot accidentally end up in any built package.
|
||||
|
||||
|
||||
+---------------------------------------------------------------------------------------------------------+
|
||||
| **Reputational Key (GPG)** |
|
||||
+==================================================+======================================================+
|
||||
| Root Reputational Private Key (RSA 4096)# | Root Reputational Public Key (RSA 4096) |
|
||||
+--------------------------------------------------+------------------------------------------------------+
|
||||
| Signing Reputational Private Key (RSA 4096) | Signing Reputational Public Key (RSA 4096) |
|
||||
+--------------------------------------------------+------------------------------------------------------+
|
||||
| Identity: “Spack Project <maintainers@spack.io>” |
|
||||
+---------------------------------------------------------------------------------------------------------+
|
||||
| Signatures: Signed by core development team [#f1]_ |
|
||||
+---------------------------------------------------------------------------------------------------------+
|
||||
|
||||
The Root Reputational Private Key is stripped out of the GPG key and stored
|
||||
offline completely separate from Spack’s infrastructure. This allows the core
|
||||
development team to append revocation certificates to the GPG key in the
|
||||
unlikely event that the Signing Reputation Private Key is compromised. In
|
||||
general it is the expectation that rotating this key will happen infrequently if
|
||||
at all. This should allow relatively transparent verification for the end-user
|
||||
community without needing deep familiarity with GnuPG or Public Key
|
||||
Infrastructure.
|
||||
|
||||
|
||||
.. _build_cache_format:
|
||||
|
||||
------------------
|
||||
Build Cache Format
|
||||
------------------
|
||||
|
||||
A binary package consists of a metadata file unambiguously defining the
|
||||
built package (and including other details such as how to relocate it)
|
||||
and the installation directory of the package stored as a compressed
|
||||
archive file. The metadata files can either be unsigned, in which case
|
||||
the contents are simply the json-serialized concrete spec plus metadata,
|
||||
or they can be signed, in which case the json-serialized concrete spec
|
||||
plus metadata is wrapped in a gpg cleartext signature. Built package
|
||||
metadata files are named to indicate the operating system and
|
||||
architecture for which the package was built as well as the compiler
|
||||
used to build it and the packages name and version. For example::
|
||||
|
||||
linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig
|
||||
|
||||
would contain the concrete spec and binary metadata for a binary package
|
||||
of ``zlib@1.2.12``, built for the ``ubuntu`` operating system and ``haswell``
|
||||
architecture. The id of the built package exists in the name of the file
|
||||
as well (after the package name and version) and in this case begins
|
||||
with ``llv2ys``. The id distinguishes a particular built package from all
|
||||
other built packages with the same os/arch, compiler, name, and version.
|
||||
Below is an example of a signed binary package metadata file. Such a
|
||||
file would live in the ``build_cache`` directory of a binary mirror::
|
||||
|
||||
-----BEGIN PGP SIGNED MESSAGE-----
|
||||
Hash: SHA512
|
||||
|
||||
{
|
||||
"spec": {
|
||||
<concrete-spec-contents-omitted>
|
||||
},
|
||||
|
||||
"buildcache_layout_version": 1,
|
||||
"binary_cache_checksum": {
|
||||
"hash_algorithm": "sha256",
|
||||
"hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423"
|
||||
},
|
||||
|
||||
"buildinfo": {
|
||||
"relative_prefix":
|
||||
"linux-ubuntu18.04-haswell/gcc-7.5.0/zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow",
|
||||
"relative_rpaths": false
|
||||
}
|
||||
}
|
||||
|
||||
-----BEGIN PGP SIGNATURE-----
|
||||
iQGzBAEBCgAdFiEETZn0sLle8jIrdAPLx/P+voVcifMFAmKAGvwACgkQx/P+voVc
|
||||
ifNoVgv/VrhA+wurVs5GB9PhmMA1m5U/AfXZb4BElDRwpT8ZcTPIv5X8xtv60eyn
|
||||
4EOneGVbZoMThVxgev/NKARorGmhFXRqhWf+jknJZ1dicpqn/qpv34rELKUpgXU+
|
||||
QDQ4d1P64AIdTczXe2GI9ZvhOo6+bPvK7LIsTkBbtWmopkomVxF0LcMuxAVIbA6b
|
||||
887yBvVO0VGlqRnkDW7nXx49r3AG2+wDcoU1f8ep8QtjOcMNaPTPJ0UnjD0VQGW6
|
||||
4ZFaGZWzdo45MY6tF3o5mqM7zJkVobpoW3iUz6J5tjz7H/nMlGgMkUwY9Kxp2PVH
|
||||
qoj6Zip3LWplnl2OZyAY+vflPFdFh12Xpk4FG7Sxm/ux0r+l8tCAPvtw+G38a5P7
|
||||
QEk2JBr8qMGKASmnRlJUkm1vwz0a95IF3S9YDfTAA2vz6HH3PtsNLFhtorfx8eBi
|
||||
Wn5aPJAGEPOawEOvXGGbsH4cDEKPeN0n6cy1k92uPEmBLDVsdnur8q42jk5c2Qyx
|
||||
j3DXty57
|
||||
=3gvm
|
||||
-----END PGP SIGNATURE-----
|
||||
|
||||
If a user has trusted the public key associated with the private key
|
||||
used to sign the above spec file, the signature can be verified with
|
||||
gpg, as follows::
|
||||
|
||||
   $ gpg --verify linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig
|
||||
|
||||
The metadata (regardless whether signed or unsigned) contains the checksum
|
||||
of the ``.spack`` file containing the actual installation. The checksum should
|
||||
be compared to a checksum computed locally on the ``.spack`` file to ensure the
|
||||
contents have not changed since the binary spec plus metadata were signed. The
|
||||
``.spack`` files are actually tarballs containing the compressed archive of the
|
||||
install tree. These files, along with the metadata files, live within the
|
||||
``build_cache`` directory of the mirror, and together are organized as follows::
|
||||
|
||||
build_cache/
|
||||
# unsigned metadata (for indexing, contains sha256 of .spack file)
|
||||
<arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json
|
||||
# clearsigned metadata (same as above, but signed)
|
||||
<arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json.sig
|
||||
<arch>/
|
||||
<compiler>/
|
||||
<name>-<ver>/
|
||||
# tar.gz-compressed prefix (may support more compression formats later)
|
||||
<arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spack
|
||||
|
||||
Uncompressing and extracting the ``.spack`` file results in the install tree.
|
||||
This is in contrast to previous versions of spack, where the ``.spack`` file
|
||||
contained a (duplicated) metadata file, a signature file and a nested tarball
|
||||
containing the install tree.
|
||||
|
||||
.. _internal_implementation:
|
||||
|
||||
-----------------------
|
||||
Internal Implementation
|
||||
-----------------------
|
||||
|
||||
The technical implementation of the pipeline signing process includes components
|
||||
defined in Amazon Web Services, the Kubernetes cluster, at affiliated
|
||||
institutions, and the GitLab/GitLab Runner deployment. We present the technical
|
||||
implementation in two interdependent sections. The first addresses how secrets
|
||||
are managed through the lifecycle of a develop or release pipeline. The second
|
||||
section describes how Gitlab Runner and pipelines are configured and managed to
|
||||
support secure automated signing.
|
||||
|
||||
Secrets Management
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
As stated above the Root Private Keys (intermediate and reputational)
|
||||
are stripped from the GPG keys and stored outside Spack’s
|
||||
infrastructure.
|
||||
|
||||
.. warning::
|
||||
**TODO**
|
||||
- Explanation here about where and how access is handled for these keys.
|
||||
- Both Root private keys are protected with strong passwords
|
||||
- Who has access to these and how?
|
||||
|
||||
**Intermediate CI Key**
|
||||
-----------------------
|
||||
|
||||
Multiple intermediate CI signing keys exist, one Intermediate CI Key for jobs
|
||||
run in AWS, and one key for each affiliated institution (e.g. University of
|
||||
Oregon). Here we describe how the Intermediate CI Key is managed in AWS:
|
||||
|
||||
The Intermediate CI Key (including the Signing Intermediate CI Private Key) is
|
||||
exported as an ASCII armored file and stored in a Kubernetes secret called
|
||||
``spack-intermediate-ci-signing-key``. For convenience sake, this same secret
|
||||
contains an ASCII-armored export of just the *public* components of the
|
||||
Reputational Key. This secret also contains the *public* components of each of
|
||||
the affiliated institutions' Intermediate CI Key. These are potentially needed
|
||||
to verify dependent packages which may have been found in the public mirror or
|
||||
built by a protected job running on an affiliated institution's infrastructure
|
||||
in an earlier stage of the pipeline.
|
||||
|
||||
Procedurally the ``spack-intermediate-ci-signing-key`` secret is used in
|
||||
the following way:
|
||||
|
||||
1. A ``large-arm-prot`` or ``large-x86-prot`` protected runner picks up
|
||||
a job tagged ``protected`` from a protected GitLab branch. (See
|
||||
`Protected Runners and Reserved Tags <#_8bawjmgykv0b>`__).
|
||||
2. Based on its configuration, the runner creates a job Pod in the
|
||||
pipeline namespace and mounts the spack-intermediate-ci-signing-key
|
||||
Kubernetes secret into the build container
|
||||
3. The Intermediate CI Key, affiliated institutions' public key and the
|
||||
Reputational Public Key are imported into a keyring by the ``spack gpg …``
|
||||
sub-command. This is initiated by the job’s build script which is created by
|
||||
the generate job at the beginning of the pipeline.
|
||||
4. Assuming the package has dependencies those specs are verified using
|
||||
the keyring.
|
||||
5. The package is built and the spec.json is generated
|
||||
6. The spec.json is signed by the keyring and uploaded to the mirror’s
|
||||
build cache.
|
||||
|
||||
**Reputational Key**
|
||||
--------------------
|
||||
|
||||
Because of the increased impact to end users in the case of a private
|
||||
key breach, the Reputational Key is managed separately from the
|
||||
Intermediate CI Keys and has additional controls. First, the Reputational
|
||||
Key was generated outside of Spack’s infrastructure and has been signed
|
||||
by the core development team. The Reputational Key (along with the
|
||||
Signing Reputational Private Key) was then ASCII armor exported to a
|
||||
file. Unlike the Intermediate CI Key this exported file is not stored as
|
||||
a base64 encoded secret in Kubernetes. Instead, *the key file
|
||||
itself* is encrypted and stored in Kubernetes as the
|
||||
``spack-signing-key-encrypted`` secret in the pipeline namespace.
|
||||
|
||||
The encryption of the exported Reputational Key (including the Signing
|
||||
Reputational Private Key) is handled by `AWS Key Management Store (KMS) data
|
||||
keys
|
||||
<https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#data-keys>`__.
|
||||
The private key material is decrypted and imported at the time of signing into a
|
||||
memory mounted temporary directory holding the keychain. The signing job uses
|
||||
the `AWS Encryption SDK
|
||||
<https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/crypto-cli.html>`__
|
||||
(i.e. ``aws-encryption-cli``) to decrypt the Reputational Key. Permission to
|
||||
decrypt the key is granted to the job Pod through a Kubernetes service account
|
||||
specifically used for this, and only this, function. Finally, for convenience
|
||||
sake, this same secret contains an ASCII-armored export of the *public*
|
||||
components of the Intermediate CI Keys and the Reputational Key. This allows the
|
||||
signing script to verify that packages were built by the pipeline (both on AWS
|
||||
or at affiliated institutions), or signed previously as a part of a different
|
||||
pipeline. This is done *before* decrypting and importing the
|
||||
Signing Reputational Private Key material and officially signing the packages.
|
||||
|
||||
Procedurally the ``spack-signing-key-encrypted`` secret is used in the
|
||||
following way:
|
||||
|
||||
1. The ``spack-package-signing-gitlab-runner`` protected runner picks
|
||||
up a job tagged ``notary`` from a protected GitLab branch (See
|
||||
`Protected Runners and Reserved Tags <#_8bawjmgykv0b>`__).
|
||||
2. Based on its configuration, the runner creates a job pod in the
|
||||
pipeline namespace. The job is run in a stripped down purpose-built
|
||||
image ``ghcr.io/spack/notary:latest`` Docker image. The runner is
|
||||
configured to only allow running jobs with this image.
|
||||
3. The runner also mounts the ``spack-signing-key-encrypted`` secret to
|
||||
a path on disk. Note that this becomes several files on disk, the
|
||||
public components of the Intermediate CI Keys, the public components
|
||||
of the Reputational Key, and an AWS KMS encrypted file containing the
|
||||
Signing Reputational Private Key.
|
||||
4. In addition to the secret, the runner creates a tmpfs memory mounted
|
||||
directory where the GnuPG keyring will be created to verify, and
|
||||
then resign the package specs.
|
||||
5. The job script syncs all spec.json.sig files from the build cache to
|
||||
a working directory in the job’s execution environment.
|
||||
6. The job script then runs the ``sign.sh`` script built into the
|
||||
notary Docker image.
|
||||
7. The ``sign.sh`` script imports the public components of the
|
||||
Reputational and Intermediate CI Keys and uses them to verify good
|
||||
signatures on the spec.json.sig files. If any signed spec does not
|
||||
verify the job immediately fails.
|
||||
8. Assuming all specs are verified, the ``sign.sh`` script then unpacks
|
||||
the spec json data from the signed file in preparation for being
|
||||
re-signed with the Reputational Key.
|
||||
9. The private components of the Reputational Key are decrypted to
|
||||
standard out using ``aws-encryption-cli`` directly into a ``gpg
|
||||
--import …`` statement which imports the key into the
|
||||
keyring mounted in-memory.
|
||||
10. The private key is then used to sign each of the json specs and the
|
||||
keyring is removed from disk.
|
||||
11. The re-signed json specs are resynced to the AWS S3 Mirror and the
|
||||
public signing of the packages for the develop or release pipeline
|
||||
that created them is complete.
|
||||
|
||||
Non service-account access to the private components of the Reputational
|
||||
Key that are managed through access to the symmetric secret in KMS used
|
||||
to encrypt the data key (which in turn is used to encrypt the GnuPG key
|
||||
- See:\ `Encryption SDK
|
||||
Documentation <https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/crypto-cli-examples.html#cli-example-encrypt-file>`__).
|
||||
A small trusted subset of the core development team are the only
|
||||
individuals with access to this symmetric key.
|
||||
|
||||
.. _protected_runners:
|
||||
|
||||
Protected Runners and Reserved Tags
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Spack has a large number of Gitlab Runners operating in its build farm.
|
||||
These include runners deployed in the AWS Kubernetes cluster as well as
|
||||
runners deployed at affiliated institutions. The majority of runners are
|
||||
shared runners that operate across projects in gitlab.spack.io. These
|
||||
runners pick up jobs primarily from the spack/spack project and execute
|
||||
them in PR pipelines.
|
||||
|
||||
A small number of runners operating on AWS and at affiliated institutions are
|
||||
registered as specific *protected* runners on the spack/spack project. In
|
||||
addition to protected runners there are protected branches on the spack/spack
|
||||
project. These are the ``develop`` branch, any release branch (i.e. managed with
|
||||
the ``releases/v*`` wildcard) and any tag branch (managed with the ``v*``
|
||||
wildcard). Finally, Spack’s pipeline generation code reserves certain tags to make
|
||||
sure jobs are routed to the correct runners, these tags are ``public``,
|
||||
``protected``, and ``notary``. Understanding how all this works together to
|
||||
protect secrets and provide integrity assurances can be a little confusing so
|
||||
lets break these down:
|
||||
|
||||
- **Protected Branches**- Protected branches in Spack prevent anyone
|
||||
other than Maintainers in GitLab from pushing code. In the case of
|
||||
Spack the only Maintainer level entity pushing code to protected
|
||||
branches is Spack bot. Protecting branches also marks them in such a
|
||||
  way that Protected Runners will only run jobs from those branches.
|
||||
- **Protected Runners**- Protected Runners only run jobs from protected
|
||||
branches. Because protected runners have access to secrets, it's critical
|
||||
that they not run Jobs from untrusted code (i.e. PR branches). If they did it
|
||||
would be possible for a PR branch to tag a job in such a way that a protected
|
||||
runner executed that job and mounted secrets into a code execution
|
||||
environment that had not been reviewed by Spack maintainers. Note however
|
||||
that in the absence of tagging used to route jobs, public runners *could* run
|
||||
jobs from protected branches. No secrets would be at risk of being breached
|
||||
because non-protected runners do not have access to those secrets; lack of
|
||||
secrets would, however, cause the jobs to fail.
|
||||
- **Reserved Tags**- To mitigate the issue of public runners picking up
|
||||
protected jobs Spack uses a small set of “reserved” job tags (Note that these
|
||||
  are *job* tags not git tags). These tags are “public”, “protected”, and
|
||||
“notary.” The majority of jobs executed in Spack’s GitLab instance are
|
||||
executed via a ``generate`` job. The generate job code systematically ensures
|
||||
that no user defined configuration sets these tags. Instead, the ``generate``
|
||||
job sets these tags based on rules related to the branch where this pipeline
|
||||
originated. If the job is a part of a pipeline on a PR branch it sets the
|
||||
``public`` tag. If the job is part of a pipeline on a protected branch it
|
||||
sets the ``protected`` tag. Finally if the job is the package signing job and
|
||||
it is running on a pipeline that is part of a protected branch then it sets
|
||||
the ``notary`` tag.
|
||||
|
||||
Protected Runners are configured to only run jobs from protected branches. Only
|
||||
jobs running in pipelines on protected branches are tagged with ``protected`` or
|
||||
``notary`` tags. This tightly couples jobs on protected branches to protected
|
||||
runners that provide access to the secrets required to sign the built packages.
|
||||
The secrets can **only** be accessed via:
|
||||
|
||||
1. Runners under direct control of the core development team.
|
||||
2. Runners under direct control of trusted maintainers at affiliated institutions.
|
||||
3. By code running the automated pipeline that has been reviewed by the
|
||||
Spack maintainers and judged to be appropriate.
|
||||
|
||||
Other attempts (either through malicious intent or incompetence) can at
|
||||
worst grab jobs intended for protected runners which will cause those
|
||||
jobs to fail alerting both Spack maintainers and the core development
|
||||
team.
|
||||
|
||||
.. [#f1]
|
||||
The Reputational Key has also cross signed core development team
|
||||
keys.
|
||||
2
lib/spack/external/__init__.py
vendored
2
lib/spack/external/__init__.py
vendored
@@ -18,7 +18,7 @@
|
||||
|
||||
* Homepage: https://pypi.python.org/pypi/archspec
|
||||
* Usage: Labeling, comparison and detection of microarchitectures
|
||||
* Version: 0.2.1 (commit 4b1f21802a23b536bbcce73d3c631a566b20e8bd)
|
||||
* Version: 0.2.1 (commit 9e1117bd8a2f0581bced161f2a2e8d6294d0300b)
|
||||
|
||||
astunparse
|
||||
----------------
|
||||
|
||||
@@ -2803,7 +2803,7 @@
|
||||
"flags" : "-march=armv8.2-a+fp16+dotprod+crypto -mtune=cortex-a72"
|
||||
},
|
||||
{
|
||||
"versions": "10.2",
|
||||
"versions": "10.2:10.2.99",
|
||||
"flags" : "-mcpu=zeus"
|
||||
},
|
||||
{
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
|
||||
__version__ = "0.20.0.dev0"
|
||||
__version__ = "0.21.0.dev0"
|
||||
spack_version = __version__
|
||||
|
||||
|
||||
|
||||
@@ -760,13 +760,12 @@ def hashes_to_prefixes(spec):
|
||||
}
|
||||
|
||||
|
||||
def get_buildinfo_dict(spec, rel=False):
|
||||
def get_buildinfo_dict(spec):
|
||||
"""Create metadata for a tarball"""
|
||||
manifest = get_buildfile_manifest(spec)
|
||||
|
||||
return {
|
||||
"sbang_install_path": spack.hooks.sbang.sbang_install_path(),
|
||||
"relative_rpaths": rel,
|
||||
"buildpath": spack.store.layout.root,
|
||||
"spackprefix": spack.paths.prefix,
|
||||
"relative_prefix": os.path.relpath(spec.prefix, spack.store.layout.root),
|
||||
@@ -1209,9 +1208,6 @@ class PushOptions(NamedTuple):
|
||||
#: Overwrite existing tarball/metadata files in buildcache
|
||||
force: bool = False
|
||||
|
||||
#: Whether to use relative RPATHs
|
||||
relative: bool = False
|
||||
|
||||
#: Allow absolute paths to package prefixes when creating a tarball
|
||||
allow_root: bool = False
|
||||
|
||||
@@ -1281,41 +1277,17 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
|
||||
raise NoOverwriteException(url_util.format(remote_specfile_path))
|
||||
|
||||
pkg_dir = os.path.basename(spec.prefix.rstrip(os.path.sep))
|
||||
workdir = os.path.join(stage_dir, pkg_dir)
|
||||
|
||||
# TODO: We generally don't want to mutate any files, but when using relative
|
||||
# mode, Spack unfortunately *does* mutate rpaths and links ahead of time.
|
||||
# For now, we only make a full copy of the spec prefix when in relative mode.
|
||||
|
||||
if options.relative:
|
||||
# tarfile is used because it preserves hardlink etc best.
|
||||
binaries_dir = workdir
|
||||
temp_tarfile_name = tarball_name(spec, ".tar")
|
||||
temp_tarfile_path = os.path.join(tarfile_dir, temp_tarfile_name)
|
||||
with closing(tarfile.open(temp_tarfile_path, "w")) as tar:
|
||||
tar.add(name="%s" % spec.prefix, arcname=".")
|
||||
with closing(tarfile.open(temp_tarfile_path, "r")) as tar:
|
||||
tar.extractall(workdir)
|
||||
os.remove(temp_tarfile_path)
|
||||
else:
|
||||
binaries_dir = spec.prefix
|
||||
binaries_dir = spec.prefix
|
||||
|
||||
# create info for later relocation and create tar
|
||||
buildinfo = get_buildinfo_dict(spec, options.relative)
|
||||
buildinfo = get_buildinfo_dict(spec)
|
||||
|
||||
# optionally make the paths in the binaries relative to each other
|
||||
# in the spack install tree before creating tarball
|
||||
if options.relative:
|
||||
make_package_relative(workdir, spec, buildinfo, options.allow_root)
|
||||
elif not options.allow_root:
|
||||
if not options.allow_root:
|
||||
ensure_package_relocatable(buildinfo, binaries_dir)
|
||||
|
||||
_do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo)
|
||||
|
||||
# remove copy of install directory
|
||||
if options.relative:
|
||||
shutil.rmtree(workdir)
|
||||
|
||||
# get the sha256 checksum of the tarball
|
||||
checksum = checksum_tarball(tarfile_path)
|
||||
|
||||
@@ -1336,7 +1308,6 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
|
||||
# This will be used to determine is the directory layout has changed.
|
||||
buildinfo = {}
|
||||
buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.layout.root)
|
||||
buildinfo["relative_rpaths"] = options.relative
|
||||
spec_dict["buildinfo"] = buildinfo
|
||||
|
||||
with open(specfile_path, "w") as outfile:
|
||||
@@ -1596,35 +1567,6 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
|
||||
return None
|
||||
|
||||
|
||||
def make_package_relative(workdir, spec, buildinfo, allow_root):
|
||||
"""
|
||||
Change paths in binaries to relative paths. Change absolute symlinks
|
||||
to relative symlinks.
|
||||
"""
|
||||
prefix = spec.prefix
|
||||
old_layout_root = buildinfo["buildpath"]
|
||||
orig_path_names = list()
|
||||
cur_path_names = list()
|
||||
for filename in buildinfo["relocate_binaries"]:
|
||||
orig_path_names.append(os.path.join(prefix, filename))
|
||||
cur_path_names.append(os.path.join(workdir, filename))
|
||||
|
||||
platform = spack.platforms.by_name(spec.platform)
|
||||
if "macho" in platform.binary_formats:
|
||||
relocate.make_macho_binaries_relative(cur_path_names, orig_path_names, old_layout_root)
|
||||
|
||||
if "elf" in platform.binary_formats:
|
||||
relocate.make_elf_binaries_relative(cur_path_names, orig_path_names, old_layout_root)
|
||||
|
||||
allow_root or relocate.ensure_binaries_are_relocatable(cur_path_names)
|
||||
orig_path_names = list()
|
||||
cur_path_names = list()
|
||||
for linkname in buildinfo.get("relocate_links", []):
|
||||
orig_path_names.append(os.path.join(prefix, linkname))
|
||||
cur_path_names.append(os.path.join(workdir, linkname))
|
||||
relocate.make_link_relative(cur_path_names, orig_path_names)
|
||||
|
||||
|
||||
def ensure_package_relocatable(buildinfo, binaries_dir):
|
||||
"""Check if package binaries are relocatable."""
|
||||
binaries = [os.path.join(binaries_dir, f) for f in buildinfo["relocate_binaries"]]
|
||||
|
||||
@@ -175,12 +175,12 @@ def black_root_spec() -> str:
|
||||
|
||||
def flake8_root_spec() -> str:
|
||||
"""Return the root spec used to bootstrap flake8"""
|
||||
return _root_spec("py-flake8")
|
||||
return _root_spec("py-flake8@3.8.2:")
|
||||
|
||||
|
||||
def pytest_root_spec() -> str:
|
||||
"""Return the root spec used to bootstrap flake8"""
|
||||
return _root_spec("py-pytest")
|
||||
return _root_spec("py-pytest@6.2.4:")
|
||||
|
||||
|
||||
def ensure_environment_dependencies() -> None:
|
||||
|
||||
@@ -589,7 +589,6 @@ def set_module_variables_for_package(pkg):
|
||||
|
||||
# TODO: make these build deps that can be installed if not found.
|
||||
m.make = MakeExecutable("make", jobs)
|
||||
m.gmake = MakeExecutable("gmake", jobs)
|
||||
m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
|
||||
# TODO: johnwparent: add package or builder support to define these build tools
|
||||
# for now there is no entrypoint for builders to define these on their
|
||||
@@ -1374,7 +1373,7 @@ def long_message(self):
|
||||
test_log = join_path(os.path.dirname(self.log_name), spack_install_test_log)
|
||||
if os.path.isfile(test_log):
|
||||
out.write("\nSee test log for details:\n")
|
||||
out.write(" {0}n".format(test_log))
|
||||
out.write(" {0}\n".format(test_log))
|
||||
|
||||
return out.getvalue()
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import collections.abc
|
||||
import os
|
||||
from typing import Tuple
|
||||
|
||||
@@ -13,21 +14,24 @@
|
||||
from .cmake import CMakeBuilder, CMakePackage
|
||||
|
||||
|
||||
def cmake_cache_path(name, value, comment=""):
|
||||
def cmake_cache_path(name, value, comment="", force=False):
|
||||
"""Generate a string for a cmake cache variable"""
|
||||
return 'set({0} "{1}" CACHE PATH "{2}")\n'.format(name, value, comment)
|
||||
force_str = " FORCE" if force else ""
|
||||
return 'set({0} "{1}" CACHE PATH "{2}"{3})\n'.format(name, value, comment, force_str)
|
||||
|
||||
|
||||
def cmake_cache_string(name, value, comment=""):
|
||||
def cmake_cache_string(name, value, comment="", force=False):
|
||||
"""Generate a string for a cmake cache variable"""
|
||||
return 'set({0} "{1}" CACHE STRING "{2}")\n'.format(name, value, comment)
|
||||
force_str = " FORCE" if force else ""
|
||||
return 'set({0} "{1}" CACHE STRING "{2}"{3})\n'.format(name, value, comment, force_str)
|
||||
|
||||
|
||||
def cmake_cache_option(name, boolean_value, comment=""):
|
||||
def cmake_cache_option(name, boolean_value, comment="", force=False):
|
||||
"""Generate a string for a cmake configuration option"""
|
||||
|
||||
value = "ON" if boolean_value else "OFF"
|
||||
return 'set({0} {1} CACHE BOOL "{2}")\n'.format(name, value, comment)
|
||||
force_str = " FORCE" if force else ""
|
||||
return 'set({0} {1} CACHE BOOL "{2}"{3})\n'.format(name, value, comment, force_str)
|
||||
|
||||
|
||||
class CachedCMakeBuilder(CMakeBuilder):
|
||||
@@ -63,6 +67,34 @@ def cache_name(self):
|
||||
def cache_path(self):
|
||||
return os.path.join(self.pkg.stage.source_path, self.cache_name)
|
||||
|
||||
# Implement a version of the define_from_variant for Cached packages
|
||||
def define_cmake_cache_from_variant(self, cmake_var, variant=None, comment=""):
|
||||
"""Return a Cached CMake field from the given variant's value.
|
||||
See define_from_variant in lib/spack/spack/build_systems/cmake.py package
|
||||
"""
|
||||
|
||||
if variant is None:
|
||||
variant = cmake_var.lower()
|
||||
|
||||
if variant not in self.pkg.variants:
|
||||
raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))
|
||||
|
||||
if variant not in self.pkg.spec.variants:
|
||||
return ""
|
||||
|
||||
value = self.pkg.spec.variants[variant].value
|
||||
field = None
|
||||
if isinstance(value, bool):
|
||||
field = cmake_cache_option(cmake_var, value, comment)
|
||||
else:
|
||||
if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
|
||||
value = ";".join(str(v) for v in value)
|
||||
else:
|
||||
value = str(value)
|
||||
field = cmake_cache_string(cmake_var, value, comment)
|
||||
|
||||
return field
|
||||
|
||||
def initconfig_compiler_entries(self):
|
||||
# This will tell cmake to use the Spack compiler wrappers when run
|
||||
# through Spack, but use the underlying compiler when run outside of
|
||||
@@ -130,6 +162,17 @@ def initconfig_compiler_entries(self):
|
||||
libs_string = libs_format_string.format(lang)
|
||||
entries.append(cmake_cache_string(libs_string, libs_flags))
|
||||
|
||||
# Set the generator in the cached config
|
||||
if self.spec.satisfies("generator=make"):
|
||||
entries.append(cmake_cache_string("CMAKE_GENERATOR", "Unix Makefiles"))
|
||||
if self.spec.satisfies("generator=ninja"):
|
||||
entries.append(cmake_cache_string("CMAKE_GENERATOR", "Ninja"))
|
||||
entries.append(
|
||||
cmake_cache_string(
|
||||
"CMAKE_MAKE_PROGRAM", "{0}/ninja".format(spec["ninja"].prefix.bin)
|
||||
)
|
||||
)
|
||||
|
||||
return entries
|
||||
|
||||
def initconfig_mpi_entries(self):
|
||||
@@ -195,26 +238,57 @@ def initconfig_hardware_entries(self):
|
||||
"#------------------{0}\n".format("-" * 60),
|
||||
]
|
||||
|
||||
# Provide standard CMake arguments for dependent CachedCMakePackages
|
||||
if spec.satisfies("^cuda"):
|
||||
entries.append("#------------------{0}".format("-" * 30))
|
||||
entries.append("# Cuda")
|
||||
entries.append("#------------------{0}\n".format("-" * 30))
|
||||
|
||||
cudatoolkitdir = spec["cuda"].prefix
|
||||
entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))
|
||||
cudacompiler = "${CUDA_TOOLKIT_ROOT_DIR}/bin/nvcc"
|
||||
entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER", cudacompiler))
|
||||
entries.append(cmake_cache_path("CUDAToolkit_ROOT", cudatoolkitdir))
|
||||
entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER", "${CUDAToolkit_ROOT}/bin/nvcc"))
|
||||
entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${CMAKE_CXX_COMPILER}"))
|
||||
# Include the deprecated CUDA_TOOLKIT_ROOT_DIR for supporting BLT packages
|
||||
entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))
|
||||
|
||||
archs = spec.variants["cuda_arch"].value
|
||||
if archs[0] != "none":
|
||||
arch_str = ";".join(archs)
|
||||
entries.append(
|
||||
cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", "{0}".format(arch_str))
|
||||
)
|
||||
|
||||
if "+rocm" in spec:
|
||||
entries.append("#------------------{0}".format("-" * 30))
|
||||
entries.append("# ROCm")
|
||||
entries.append("#------------------{0}\n".format("-" * 30))
|
||||
|
||||
# Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
|
||||
entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
|
||||
entries.append(
|
||||
cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc))
|
||||
)
|
||||
archs = self.spec.variants["amdgpu_target"].value
|
||||
if archs[0] != "none":
|
||||
arch_str = ";".join(archs)
|
||||
entries.append(
|
||||
cmake_cache_string("CMAKE_HIP_ARCHITECTURES", "{0}".format(arch_str))
|
||||
)
|
||||
entries.append(cmake_cache_string("AMDGPU_TARGETS", "{0}".format(arch_str)))
|
||||
entries.append(cmake_cache_string("GPU_TARGETS", "{0}".format(arch_str)))
|
||||
|
||||
return entries
|
||||
|
||||
def std_initconfig_entries(self):
|
||||
cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
|
||||
cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
|
||||
return [
|
||||
"#------------------{0}".format("-" * 60),
|
||||
"# !!!! This is a generated file, edit at own risk !!!!",
|
||||
"#------------------{0}".format("-" * 60),
|
||||
"# CMake executable path: {0}".format(self.pkg.spec["cmake"].command.path),
|
||||
"#------------------{0}\n".format("-" * 60),
|
||||
cmake_cache_path("CMAKE_PREFIX_PATH", cmake_prefix_path),
|
||||
]
|
||||
|
||||
def initconfig_package_entries(self):
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
import collections.abc
|
||||
import inspect
|
||||
import os
|
||||
import pathlib
|
||||
import platform
|
||||
import re
|
||||
import sys
|
||||
@@ -15,7 +16,6 @@
|
||||
import spack.build_environment
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.util.path
|
||||
from spack.directives import build_system, conflicts, depends_on, variant
|
||||
from spack.multimethod import when
|
||||
|
||||
@@ -271,7 +271,7 @@ def std_args(pkg, generator=None):
|
||||
args = [
|
||||
"-G",
|
||||
generator,
|
||||
define("CMAKE_INSTALL_PREFIX", pkg.prefix),
|
||||
define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
|
||||
define("CMAKE_BUILD_TYPE", build_type),
|
||||
define("BUILD_TESTING", pkg.run_tests),
|
||||
]
|
||||
|
||||
@@ -180,6 +180,51 @@ def test(self):
|
||||
work_dir="spack-test",
|
||||
)
|
||||
|
||||
def update_external_dependencies(self, extendee_spec=None):
|
||||
"""
|
||||
Ensure all external python packages have a python dependency
|
||||
|
||||
If another package in the DAG depends on python, we use that
|
||||
python for the dependency of the external. If not, we assume
|
||||
that the external PythonPackage is installed into the same
|
||||
directory as the python it depends on.
|
||||
"""
|
||||
# TODO: Include this in the solve, rather than instantiating post-concretization
|
||||
if "python" not in self.spec:
|
||||
if extendee_spec:
|
||||
python = extendee_spec
|
||||
elif "python" in self.spec.root:
|
||||
python = self.spec.root["python"]
|
||||
else:
|
||||
python = self.get_external_python_for_prefix()
|
||||
if not python.concrete:
|
||||
repo = spack.repo.path.repo_for_pkg(python)
|
||||
python.namespace = repo.namespace
|
||||
|
||||
# Ensure architecture information is present
|
||||
if not python.architecture:
|
||||
host_platform = spack.platforms.host()
|
||||
host_os = host_platform.operating_system("default_os")
|
||||
host_target = host_platform.target("default_target")
|
||||
python.architecture = spack.spec.ArchSpec(
|
||||
(str(host_platform), str(host_os), str(host_target))
|
||||
)
|
||||
else:
|
||||
if not python.architecture.platform:
|
||||
python.architecture.platform = spack.platforms.host()
|
||||
if not python.architecture.os:
|
||||
python.architecture.os = "default_os"
|
||||
if not python.architecture.target:
|
||||
python.architecture.target = archspec.cpu.host().family.name
|
||||
|
||||
# Ensure compiler information is present
|
||||
if not python.compiler:
|
||||
python.compiler = self.spec.compiler
|
||||
|
||||
python.external_path = self.spec.external_path
|
||||
python._mark_concrete()
|
||||
self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"), virtuals=())
|
||||
|
||||
|
||||
class PythonPackage(PythonExtension):
|
||||
"""Specialized class for packages that are built using pip."""
|
||||
@@ -225,51 +270,6 @@ def list_url(cls):
|
||||
name = cls.pypi.split("/")[0]
|
||||
return "https://pypi.org/simple/" + name + "/"
|
||||
|
||||
def update_external_dependencies(self, extendee_spec=None):
|
||||
"""
|
||||
Ensure all external python packages have a python dependency
|
||||
|
||||
If another package in the DAG depends on python, we use that
|
||||
python for the dependency of the external. If not, we assume
|
||||
that the external PythonPackage is installed into the same
|
||||
directory as the python it depends on.
|
||||
"""
|
||||
# TODO: Include this in the solve, rather than instantiating post-concretization
|
||||
if "python" not in self.spec:
|
||||
if extendee_spec:
|
||||
python = extendee_spec
|
||||
elif "python" in self.spec.root:
|
||||
python = self.spec.root["python"]
|
||||
else:
|
||||
python = self.get_external_python_for_prefix()
|
||||
if not python.concrete:
|
||||
repo = spack.repo.path.repo_for_pkg(python)
|
||||
python.namespace = repo.namespace
|
||||
|
||||
# Ensure architecture information is present
|
||||
if not python.architecture:
|
||||
host_platform = spack.platforms.host()
|
||||
host_os = host_platform.operating_system("default_os")
|
||||
host_target = host_platform.target("default_target")
|
||||
python.architecture = spack.spec.ArchSpec(
|
||||
(str(host_platform), str(host_os), str(host_target))
|
||||
)
|
||||
else:
|
||||
if not python.architecture.platform:
|
||||
python.architecture.platform = spack.platforms.host()
|
||||
if not python.architecture.os:
|
||||
python.architecture.os = "default_os"
|
||||
if not python.architecture.target:
|
||||
python.architecture.target = archspec.cpu.host().family.name
|
||||
|
||||
# Ensure compiler information is present
|
||||
if not python.compiler:
|
||||
python.compiler = self.spec.compiler
|
||||
|
||||
python.external_path = self.spec.external_path
|
||||
python._mark_concrete()
|
||||
self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"))
|
||||
|
||||
def get_external_python_for_prefix(self):
|
||||
"""
|
||||
For an external package that extends python, find the most likely spec for the python
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
|
||||
import llnl.util.lang as lang
|
||||
|
||||
from spack.directives import extends, maintainers
|
||||
from spack.directives import extends
|
||||
|
||||
from .generic import GenericBuilder, Package
|
||||
|
||||
@@ -71,8 +71,6 @@ class RPackage(Package):
|
||||
|
||||
GenericBuilder = RBuilder
|
||||
|
||||
maintainers("glennpj")
|
||||
|
||||
#: This attribute is used in UI queries that need to know the build
|
||||
#: system base class
|
||||
build_system_class = "RPackage"
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
from llnl.util.filesystem import find, join_path, working_dir
|
||||
|
||||
import spack.builder
|
||||
import spack.install_test
|
||||
import spack.package_base
|
||||
from spack.directives import build_system, depends_on, extends
|
||||
from spack.multimethod import when
|
||||
@@ -30,8 +31,8 @@ class SIPPackage(spack.package_base.PackageBase):
|
||||
#: Name of private sip module to install alongside package
|
||||
sip_module = "sip"
|
||||
|
||||
#: Callback names for install-time test
|
||||
install_time_test_callbacks = ["test"]
|
||||
#: Callback names for install-time testing
|
||||
install_time_test_callbacks = ["test_imports"]
|
||||
#: Legacy buildsystem attribute used to deserialize and install old specs
|
||||
legacy_buildsystem = "sip"
|
||||
|
||||
@@ -87,18 +88,20 @@ def python(self, *args, **kwargs):
|
||||
"""The python ``Executable``."""
|
||||
inspect.getmodule(self).python(*args, **kwargs)
|
||||
|
||||
def test(self):
|
||||
def test_imports(self):
|
||||
"""Attempts to import modules of the installed package."""
|
||||
|
||||
# Make sure we are importing the installed modules,
|
||||
# not the ones in the source directory
|
||||
python = inspect.getmodule(self).python
|
||||
for module in self.import_modules:
|
||||
self.run_test(
|
||||
inspect.getmodule(self).python.path,
|
||||
["-c", "import {0}".format(module)],
|
||||
with spack.install_test.test_part(
|
||||
self,
|
||||
"test_imports_{0}".format(module),
|
||||
purpose="checking import of {0}".format(module),
|
||||
work_dir="spack-test",
|
||||
)
|
||||
):
|
||||
python("-c", "import {0}".format(module))
|
||||
|
||||
|
||||
@spack.builder.builder("sip")
|
||||
|
||||
@@ -751,11 +751,12 @@ def generate_gitlab_ci_yaml(
|
||||
env.concretize()
|
||||
env.write()
|
||||
|
||||
yaml_root = ev.config_dict(env.manifest)
|
||||
yaml_root = env.manifest[ev.TOP_LEVEL_KEY]
|
||||
|
||||
# Get the joined "ci" config with all of the current scopes resolved
|
||||
ci_config = cfg.get("ci")
|
||||
|
||||
config_deprecated = False
|
||||
if not ci_config:
|
||||
tty.warn("Environment does not have `ci` a configuration")
|
||||
gitlabci_config = yaml_root.get("gitlab-ci")
|
||||
@@ -768,6 +769,7 @@ def generate_gitlab_ci_yaml(
|
||||
)
|
||||
translate_deprecated_config(gitlabci_config)
|
||||
ci_config = gitlabci_config
|
||||
config_deprecated = True
|
||||
|
||||
# Default target is gitlab...and only target is gitlab
|
||||
if not ci_config.get("target", "gitlab") == "gitlab":
|
||||
@@ -831,6 +833,14 @@ def generate_gitlab_ci_yaml(
|
||||
# Values: "spack_pull_request", "spack_protected_branch", or not set
|
||||
spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE", None)
|
||||
|
||||
copy_only_pipeline = spack_pipeline_type == "spack_copy_only"
|
||||
if copy_only_pipeline and config_deprecated:
|
||||
tty.warn(
|
||||
"SPACK_PIPELINE_TYPE=spack_copy_only is not supported when using\n",
|
||||
"deprecated ci configuration, a no-op pipeline will be generated\n",
|
||||
"instead.",
|
||||
)
|
||||
|
||||
if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
|
||||
tty.die("spack ci generate requires an env containing a mirror")
|
||||
|
||||
@@ -936,7 +946,7 @@ def generate_gitlab_ci_yaml(
|
||||
# Add config scopes to environment
|
||||
env_includes = env_yaml_root["spack"].get("include", [])
|
||||
cli_scopes = [
|
||||
os.path.abspath(s.path)
|
||||
os.path.relpath(s.path, concrete_env_dir)
|
||||
for s in cfg.scopes().values()
|
||||
if type(s) == cfg.ImmutableConfigScope
|
||||
and s.path not in env_includes
|
||||
@@ -1085,7 +1095,7 @@ def generate_gitlab_ci_yaml(
|
||||
raise AttributeError
|
||||
|
||||
def main_script_replacements(cmd):
|
||||
return cmd.replace("{env_dir}", concrete_env_dir)
|
||||
return cmd.replace("{env_dir}", rel_concrete_env_dir)
|
||||
|
||||
job_object["script"] = _unpack_script(
|
||||
job_object["script"], op=main_script_replacements
|
||||
@@ -1207,7 +1217,7 @@ def main_script_replacements(cmd):
|
||||
).format(c_spec, release_spec)
|
||||
tty.debug(debug_msg)
|
||||
|
||||
if prune_dag and not rebuild_spec and spack_pipeline_type != "spack_copy_only":
|
||||
if prune_dag and not rebuild_spec and not copy_only_pipeline:
|
||||
tty.debug(
|
||||
"Pruning {0}/{1}, does not need rebuild.".format(
|
||||
release_spec.name, release_spec.dag_hash()
|
||||
@@ -1298,7 +1308,7 @@ def main_script_replacements(cmd):
|
||||
max_length_needs = length_needs
|
||||
max_needs_job = job_name
|
||||
|
||||
if spack_pipeline_type != "spack_copy_only":
|
||||
if not copy_only_pipeline:
|
||||
output_object[job_name] = job_object
|
||||
job_id += 1
|
||||
|
||||
@@ -1330,7 +1340,7 @@ def main_script_replacements(cmd):
|
||||
"when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
|
||||
}
|
||||
|
||||
if spack_pipeline_type == "spack_copy_only":
|
||||
if copy_only_pipeline and not config_deprecated:
|
||||
stage_names.append("copy")
|
||||
sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])
|
||||
sync_job["stage"] = "copy"
|
||||
@@ -1474,12 +1484,18 @@ def main_script_replacements(cmd):
|
||||
sorted_output = cinw.needs_to_dependencies(sorted_output)
|
||||
else:
|
||||
# No jobs were generated
|
||||
tty.debug("No specs to rebuild, generating no-op job")
|
||||
noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
|
||||
|
||||
noop_job["retry"] = service_job_retries
|
||||
|
||||
sorted_output = {"no-specs-to-rebuild": noop_job}
|
||||
if copy_only_pipeline and config_deprecated:
|
||||
tty.debug("Generating no-op job as copy-only is unsupported here.")
|
||||
noop_job["script"] = [
|
||||
'echo "copy-only pipelines are not supported with deprecated ci configs"'
|
||||
]
|
||||
sorted_output = {"unsupported-copy": noop_job}
|
||||
else:
|
||||
tty.debug("No specs to rebuild, generating no-op job")
|
||||
sorted_output = {"no-specs-to-rebuild": noop_job}
|
||||
|
||||
if known_broken_specs_encountered:
|
||||
tty.error("This pipeline generated hashes known to be broken on develop:")
|
||||
|
||||
@@ -347,7 +347,7 @@ def iter_groups(specs, indent, all_headers):
|
||||
spack.spec.architecture_color,
|
||||
architecture if architecture else "no arch",
|
||||
spack.spec.compiler_color,
|
||||
f"{compiler.name}@{compiler.version}" if compiler else "no compiler",
|
||||
f"{compiler.display_str}" if compiler else "no compiler",
|
||||
)
|
||||
|
||||
# Sometimes we want to display specs that are not yet concretized.
|
||||
|
||||
@@ -43,13 +43,6 @@ def setup_parser(subparser):
|
||||
subparsers = subparser.add_subparsers(help="buildcache sub-commands")
|
||||
|
||||
push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
|
||||
# TODO: remove from Spack 0.21
|
||||
push.add_argument(
|
||||
"-r",
|
||||
"--rel",
|
||||
action="store_true",
|
||||
help="make all rpaths relative before creating tarballs. (deprecated)",
|
||||
)
|
||||
push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists.")
|
||||
push.add_argument(
|
||||
"-u", "--unsigned", action="store_true", help="push unsigned buildcache tarballs"
|
||||
@@ -63,37 +56,7 @@ def setup_parser(subparser):
|
||||
push.add_argument(
|
||||
"-k", "--key", metavar="key", type=str, default=None, help="Key for signing."
|
||||
)
|
||||
output = push.add_mutually_exclusive_group(required=False)
|
||||
# TODO: remove from Spack 0.21
|
||||
output.add_argument(
|
||||
"-d",
|
||||
"--directory",
|
||||
metavar="directory",
|
||||
dest="mirror_flag",
|
||||
type=arguments.mirror_directory,
|
||||
help="local directory where buildcaches will be written. (deprecated)",
|
||||
)
|
||||
# TODO: remove from Spack 0.21
|
||||
output.add_argument(
|
||||
"-m",
|
||||
"--mirror-name",
|
||||
metavar="mirror-name",
|
||||
dest="mirror_flag",
|
||||
type=arguments.mirror_name,
|
||||
help="name of the mirror where buildcaches will be written. (deprecated)",
|
||||
)
|
||||
# TODO: remove from Spack 0.21
|
||||
output.add_argument(
|
||||
"--mirror-url",
|
||||
metavar="mirror-url",
|
||||
dest="mirror_flag",
|
||||
type=arguments.mirror_url,
|
||||
help="URL of the mirror where buildcaches will be written. (deprecated)",
|
||||
)
|
||||
# Unfortunately we cannot add this to the mutually exclusive group above,
|
||||
# because we have further positional arguments.
|
||||
# TODO: require from Spack 0.21
|
||||
push.add_argument("mirror", type=str, help="Mirror name, path, or URL.", nargs="?")
|
||||
push.add_argument("mirror", type=str, help="Mirror name, path, or URL.")
|
||||
push.add_argument(
|
||||
"--update-index",
|
||||
"--rebuild-index",
|
||||
@@ -127,13 +90,6 @@ def setup_parser(subparser):
|
||||
install.add_argument(
|
||||
"-m", "--multiple", action="store_true", help="allow all matching packages "
|
||||
)
|
||||
# TODO: remove from Spack 0.21
|
||||
install.add_argument(
|
||||
"-a",
|
||||
"--allow-root",
|
||||
action="store_true",
|
||||
help="allow install root string in binary files after RPATH substitution. (deprecated)",
|
||||
)
|
||||
install.add_argument(
|
||||
"-u",
|
||||
"--unsigned",
|
||||
@@ -272,71 +228,17 @@ def setup_parser(subparser):
|
||||
default=None,
|
||||
help="A quoted glob pattern identifying copy manifest files",
|
||||
)
|
||||
source = sync.add_mutually_exclusive_group(required=False)
|
||||
# TODO: remove in Spack 0.21
|
||||
source.add_argument(
|
||||
"--src-directory",
|
||||
metavar="DIRECTORY",
|
||||
dest="src_mirror_flag",
|
||||
type=arguments.mirror_directory,
|
||||
help="Source mirror as a local file path (deprecated)",
|
||||
)
|
||||
# TODO: remove in Spack 0.21
|
||||
source.add_argument(
|
||||
"--src-mirror-name",
|
||||
metavar="MIRROR_NAME",
|
||||
dest="src_mirror_flag",
|
||||
type=arguments.mirror_name,
|
||||
help="Name of the source mirror (deprecated)",
|
||||
)
|
||||
# TODO: remove in Spack 0.21
|
||||
source.add_argument(
|
||||
"--src-mirror-url",
|
||||
metavar="MIRROR_URL",
|
||||
dest="src_mirror_flag",
|
||||
type=arguments.mirror_url,
|
||||
help="URL of the source mirror (deprecated)",
|
||||
)
|
||||
# TODO: only support this in 0.21
|
||||
source.add_argument(
|
||||
sync.add_argument(
|
||||
"src_mirror",
|
||||
metavar="source mirror",
|
||||
type=arguments.mirror_name_or_url,
|
||||
help="Source mirror name, path, or URL",
|
||||
nargs="?",
|
||||
)
|
||||
dest = sync.add_mutually_exclusive_group(required=False)
|
||||
# TODO: remove in Spack 0.21
|
||||
dest.add_argument(
|
||||
"--dest-directory",
|
||||
metavar="DIRECTORY",
|
||||
dest="dest_mirror_flag",
|
||||
type=arguments.mirror_directory,
|
||||
help="Destination mirror as a local file path (deprecated)",
|
||||
)
|
||||
# TODO: remove in Spack 0.21
|
||||
dest.add_argument(
|
||||
"--dest-mirror-name",
|
||||
metavar="MIRROR_NAME",
|
||||
type=arguments.mirror_name,
|
||||
dest="dest_mirror_flag",
|
||||
help="Name of the destination mirror (deprecated)",
|
||||
)
|
||||
# TODO: remove in Spack 0.21
|
||||
dest.add_argument(
|
||||
"--dest-mirror-url",
|
||||
metavar="MIRROR_URL",
|
||||
dest="dest_mirror_flag",
|
||||
type=arguments.mirror_url,
|
||||
help="URL of the destination mirror (deprecated)",
|
||||
)
|
||||
# TODO: only support this in 0.21
|
||||
dest.add_argument(
|
||||
sync.add_argument(
|
||||
"dest_mirror",
|
||||
metavar="destination mirror",
|
||||
type=arguments.mirror_name_or_url,
|
||||
help="Destination mirror name, path, or URL",
|
||||
nargs="?",
|
||||
)
|
||||
sync.set_defaults(func=sync_fn)
|
||||
|
||||
@@ -344,39 +246,8 @@ def setup_parser(subparser):
|
||||
update_index = subparsers.add_parser(
|
||||
"update-index", aliases=["rebuild-index"], help=update_index_fn.__doc__
|
||||
)
|
||||
update_index_out = update_index.add_mutually_exclusive_group(required=True)
|
||||
# TODO: remove in Spack 0.21
|
||||
update_index_out.add_argument(
|
||||
"-d",
|
||||
"--directory",
|
||||
metavar="directory",
|
||||
dest="mirror_flag",
|
||||
type=arguments.mirror_directory,
|
||||
help="local directory where buildcaches will be written (deprecated)",
|
||||
)
|
||||
# TODO: remove in Spack 0.21
|
||||
update_index_out.add_argument(
|
||||
"-m",
|
||||
"--mirror-name",
|
||||
metavar="mirror-name",
|
||||
dest="mirror_flag",
|
||||
type=arguments.mirror_name,
|
||||
help="name of the mirror where buildcaches will be written (deprecated)",
|
||||
)
|
||||
# TODO: remove in Spack 0.21
|
||||
update_index_out.add_argument(
|
||||
"--mirror-url",
|
||||
metavar="mirror-url",
|
||||
dest="mirror_flag",
|
||||
type=arguments.mirror_url,
|
||||
help="URL of the mirror where buildcaches will be written (deprecated)",
|
||||
)
|
||||
# TODO: require from Spack 0.21
|
||||
update_index_out.add_argument(
|
||||
"mirror",
|
||||
type=arguments.mirror_name_or_url,
|
||||
help="Destination mirror name, path, or URL",
|
||||
nargs="?",
|
||||
update_index.add_argument(
|
||||
"mirror", type=arguments.mirror_name_or_url, help="Destination mirror name, path, or URL"
|
||||
)
|
||||
update_index.add_argument(
|
||||
"-k",
|
||||
@@ -436,32 +307,12 @@ def _concrete_spec_from_args(args):
|
||||
|
||||
def push_fn(args):
|
||||
"""create a binary package and push it to a mirror"""
|
||||
if args.mirror_flag:
|
||||
mirror = args.mirror_flag
|
||||
elif not args.mirror:
|
||||
raise ValueError("No mirror provided")
|
||||
else:
|
||||
mirror = arguments.mirror_name_or_url(args.mirror)
|
||||
|
||||
if args.mirror_flag:
|
||||
tty.warn(
|
||||
"Using flags to specify mirrors is deprecated and will be removed in "
|
||||
"Spack 0.21, use positional arguments instead."
|
||||
)
|
||||
|
||||
if args.rel:
|
||||
tty.warn("The --rel flag is deprecated and will be removed in Spack 0.21")
|
||||
|
||||
# TODO: remove this in 0.21. If we have mirror_flag, the first
|
||||
# spec is in the positional mirror arg due to argparse limitations.
|
||||
input_specs = args.specs
|
||||
if args.mirror_flag and args.mirror:
|
||||
input_specs.insert(0, args.mirror)
|
||||
mirror = arguments.mirror_name_or_url(args.mirror)
|
||||
|
||||
url = mirror.push_url
|
||||
|
||||
specs = bindist.specs_to_be_packaged(
|
||||
_matching_specs(input_specs, args.spec_file),
|
||||
_matching_specs(args.specs, args.spec_file),
|
||||
root="package" in args.things_to_install,
|
||||
dependencies="dependencies" in args.things_to_install,
|
||||
)
|
||||
@@ -486,7 +337,6 @@ def push_fn(args):
|
||||
url,
|
||||
bindist.PushOptions(
|
||||
force=args.force,
|
||||
relative=args.rel,
|
||||
unsigned=args.unsigned,
|
||||
allow_root=args.allow_root,
|
||||
key=args.key,
|
||||
@@ -524,9 +374,6 @@ def install_fn(args):
|
||||
if not args.specs:
|
||||
tty.die("a spec argument is required to install from a buildcache")
|
||||
|
||||
if args.allow_root:
|
||||
tty.warn("The --allow-root flag is deprecated and will be removed in Spack 0.21")
|
||||
|
||||
query = bindist.BinaryCacheQuery(all_architectures=args.otherarch)
|
||||
matches = spack.store.find(args.specs, multiple=args.multiple, query_fn=query)
|
||||
for match in matches:
|
||||
@@ -710,21 +557,8 @@ def sync_fn(args):
|
||||
manifest_copy(glob.glob(args.manifest_glob))
|
||||
return 0
|
||||
|
||||
# If no manifest_glob, require a source and dest mirror.
|
||||
# TODO: Simplify in Spack 0.21
|
||||
if not (args.src_mirror_flag or args.src_mirror) or not (
|
||||
args.dest_mirror_flag or args.dest_mirror
|
||||
):
|
||||
raise ValueError("Source and destination mirror are required.")
|
||||
|
||||
if args.src_mirror_flag or args.dest_mirror_flag:
|
||||
tty.warn(
|
||||
"Using flags to specify mirrors is deprecated and will be removed in "
|
||||
"Spack 0.21, use positional arguments instead."
|
||||
)
|
||||
|
||||
src_mirror = args.src_mirror_flag if args.src_mirror_flag else args.src_mirror
|
||||
dest_mirror = args.dest_mirror_flag if args.dest_mirror_flag else args.dest_mirror
|
||||
src_mirror = args.src_mirror
|
||||
dest_mirror = args.dest_mirror
|
||||
|
||||
src_mirror_url = src_mirror.fetch_url
|
||||
dest_mirror_url = dest_mirror.push_url
|
||||
@@ -803,13 +637,7 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
|
||||
|
||||
def update_index_fn(args):
|
||||
"""Update a buildcache index."""
|
||||
if args.mirror_flag:
|
||||
tty.warn(
|
||||
"Using flags to specify mirrors is deprecated and will be removed in "
|
||||
"Spack 0.21, use positional arguments instead."
|
||||
)
|
||||
mirror = args.mirror_flag if args.mirror_flag else args.mirror
|
||||
update_index(mirror, update_keys=args.keys)
|
||||
update_index(args.mirror, update_keys=args.keys)
|
||||
|
||||
|
||||
def buildcache(parser, args):
|
||||
|
||||
@@ -228,7 +228,7 @@ def ci_reindex(args):
|
||||
Use the active, gitlab-enabled environment to rebuild the buildcache
|
||||
index for the associated mirror."""
|
||||
env = spack.cmd.require_active_env(cmd_name="ci rebuild-index")
|
||||
yaml_root = ev.config_dict(env.manifest)
|
||||
yaml_root = env.manifest[ev.TOP_LEVEL_KEY]
|
||||
|
||||
if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
|
||||
tty.die("spack ci rebuild-index requires an env containing a mirror")
|
||||
|
||||
@@ -349,7 +349,7 @@ def install_status():
|
||||
"-I",
|
||||
"--install-status",
|
||||
action="store_true",
|
||||
default=False,
|
||||
default=True,
|
||||
help="show install status of packages. packages can be: "
|
||||
"installed [+], missing and needed by an installed package [-], "
|
||||
"installed in and upstream instance [^], "
|
||||
@@ -357,6 +357,17 @@ def install_status():
|
||||
)
|
||||
|
||||
|
||||
@arg
|
||||
def no_install_status():
|
||||
return Args(
|
||||
"--no-install-status",
|
||||
dest="install_status",
|
||||
action="store_false",
|
||||
default=True,
|
||||
help="do not show install status annotations",
|
||||
)
|
||||
|
||||
|
||||
@arg
|
||||
def no_checksum():
|
||||
return Args(
|
||||
|
||||
@@ -53,7 +53,7 @@ def setup_parser(subparser):
|
||||
"--scope",
|
||||
choices=scopes,
|
||||
metavar=scopes_metavar,
|
||||
default=spack.config.default_modify_scope("compilers"),
|
||||
default=None,
|
||||
help="configuration scope to modify",
|
||||
)
|
||||
|
||||
@@ -98,7 +98,7 @@ def compiler_find(args):
|
||||
config = spack.config.config
|
||||
filename = config.get_config_filename(args.scope, "compilers")
|
||||
tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
|
||||
colify(reversed(sorted(c.spec for c in new_compilers)), indent=4)
|
||||
colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)
|
||||
else:
|
||||
tty.msg("Found no new compilers")
|
||||
tty.msg("Compilers are defined in the following files:")
|
||||
@@ -106,19 +106,21 @@ def compiler_find(args):
|
||||
|
||||
|
||||
def compiler_remove(args):
|
||||
cspec = spack.spec.CompilerSpec(args.compiler_spec)
|
||||
compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
|
||||
if not compilers:
|
||||
tty.die("No compilers match spec %s" % cspec)
|
||||
elif not args.all and len(compilers) > 1:
|
||||
tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
|
||||
colify(reversed(sorted([c.spec for c in compilers])), indent=4)
|
||||
compiler_spec = spack.spec.CompilerSpec(args.compiler_spec)
|
||||
candidate_compilers = spack.compilers.compilers_for_spec(compiler_spec, scope=args.scope)
|
||||
|
||||
if not candidate_compilers:
|
||||
tty.die("No compilers match spec %s" % compiler_spec)
|
||||
|
||||
if not args.all and len(candidate_compilers) > 1:
|
||||
tty.error(f"Multiple compilers match spec {compiler_spec}. Choose one:")
|
||||
colify(reversed(sorted([c.spec.display_str for c in candidate_compilers])), indent=4)
|
||||
tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
|
||||
sys.exit(1)
|
||||
|
||||
for compiler in compilers:
|
||||
spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope)
|
||||
tty.msg("Removed compiler %s" % compiler.spec)
|
||||
for current_compiler in candidate_compilers:
|
||||
spack.compilers.remove_compiler_from_config(current_compiler.spec, scope=args.scope)
|
||||
tty.msg(f"{current_compiler.spec.display_str} has been removed")
|
||||
|
||||
|
||||
def compiler_info(args):
|
||||
@@ -130,7 +132,7 @@ def compiler_info(args):
|
||||
tty.die("No compilers match spec %s" % cspec)
|
||||
else:
|
||||
for c in compilers:
|
||||
print(str(c.spec) + ":")
|
||||
print(c.spec.display_str + ":")
|
||||
print("\tpaths:")
|
||||
for cpath in ["cc", "cxx", "f77", "fc"]:
|
||||
print("\t\t%s = %s" % (cpath, getattr(c, cpath, None)))
|
||||
@@ -188,7 +190,7 @@ def compiler_list(args):
|
||||
os_str += "-%s" % target
|
||||
cname = "%s{%s} %s" % (spack.spec.compiler_color, name, os_str)
|
||||
tty.hline(colorize(cname), char="-")
|
||||
colify(reversed(sorted(c.spec for c in compilers)))
|
||||
colify(reversed(sorted(c.spec.display_str for c in compilers)))
|
||||
|
||||
|
||||
def compiler(parser, args):
|
||||
|
||||
@@ -715,7 +715,7 @@ def __call__(self, stage, url):
|
||||
output = tar("--exclude=*/*/*", "-tf", stage.archive_file, output=str)
|
||||
except ProcessError:
|
||||
output = ""
|
||||
lines = output.split("\n")
|
||||
lines = output.splitlines()
|
||||
|
||||
# Determine the build system based on the files contained
|
||||
# in the archive.
|
||||
|
||||
@@ -302,7 +302,7 @@ def env_create(args):
|
||||
# the environment should not include a view.
|
||||
with_view = None
|
||||
|
||||
_env_create(
|
||||
env = _env_create(
|
||||
args.create_env,
|
||||
init_file=args.envfile,
|
||||
dir=args.dir,
|
||||
@@ -310,6 +310,9 @@ def env_create(args):
|
||||
keep_relative=args.keep_relative,
|
||||
)
|
||||
|
||||
# Generate views, only really useful for environments created from spack.lock files.
|
||||
env.regenerate_views()
|
||||
|
||||
|
||||
def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
|
||||
"""Create a new environment, with an optional yaml description.
|
||||
|
||||
@@ -79,6 +79,12 @@ def setup_parser(subparser):
|
||||
read_cray_manifest.add_argument(
|
||||
"--directory", default=None, help="specify a directory storing a group of manifest files"
|
||||
)
|
||||
read_cray_manifest.add_argument(
|
||||
"--ignore-default-dir",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="ignore the default directory of manifest files",
|
||||
)
|
||||
read_cray_manifest.add_argument(
|
||||
"--dry-run",
|
||||
action="store_true",
|
||||
@@ -177,11 +183,16 @@ def external_read_cray_manifest(args):
|
||||
manifest_directory=args.directory,
|
||||
dry_run=args.dry_run,
|
||||
fail_on_error=args.fail_on_error,
|
||||
ignore_default_dir=args.ignore_default_dir,
|
||||
)
|
||||
|
||||
|
||||
def _collect_and_consume_cray_manifest_files(
|
||||
manifest_file=None, manifest_directory=None, dry_run=False, fail_on_error=False
|
||||
manifest_file=None,
|
||||
manifest_directory=None,
|
||||
dry_run=False,
|
||||
fail_on_error=False,
|
||||
ignore_default_dir=False,
|
||||
):
|
||||
manifest_files = []
|
||||
if manifest_file:
|
||||
@@ -191,7 +202,7 @@ def _collect_and_consume_cray_manifest_files(
|
||||
if manifest_directory:
|
||||
manifest_dirs.append(manifest_directory)
|
||||
|
||||
if os.path.isdir(cray_manifest.default_path):
|
||||
if not ignore_default_dir and os.path.isdir(cray_manifest.default_path):
|
||||
tty.debug(
|
||||
"Cray manifest path {0} exists: collecting all files to read.".format(
|
||||
cray_manifest.default_path
|
||||
|
||||
@@ -44,7 +44,11 @@ def setup_parser(subparser):
|
||||
)
|
||||
|
||||
# Below are arguments w.r.t. spec display (like spack spec)
|
||||
arguments.add_common_arguments(subparser, ["long", "very_long", "install_status"])
|
||||
arguments.add_common_arguments(subparser, ["long", "very_long"])
|
||||
|
||||
install_status_group = subparser.add_mutually_exclusive_group()
|
||||
arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])
|
||||
|
||||
subparser.add_argument(
|
||||
"-y",
|
||||
"--yaml",
|
||||
|
||||
@@ -31,7 +31,11 @@ def setup_parser(subparser):
|
||||
for further documentation regarding the spec syntax, see:
|
||||
spack help --spec
|
||||
"""
|
||||
arguments.add_common_arguments(subparser, ["long", "very_long", "install_status"])
|
||||
arguments.add_common_arguments(subparser, ["long", "very_long"])
|
||||
|
||||
install_status_group = subparser.add_mutually_exclusive_group()
|
||||
arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])
|
||||
|
||||
format_group = subparser.add_mutually_exclusive_group()
|
||||
format_group.add_argument(
|
||||
"-y",
|
||||
|
||||
@@ -37,7 +37,6 @@
|
||||
"implicit_rpaths",
|
||||
"extra_rpaths",
|
||||
]
|
||||
_cache_config_file = []
|
||||
|
||||
# TODO: Caches at module level make it difficult to mock configurations in
|
||||
# TODO: unit tests. It might be worth reworking their implementation.
|
||||
@@ -112,36 +111,26 @@ def _to_dict(compiler):
|
||||
def get_compiler_config(scope=None, init_config=True):
|
||||
"""Return the compiler configuration for the specified architecture."""
|
||||
|
||||
def init_compiler_config():
|
||||
"""Compiler search used when Spack has no compilers."""
|
||||
compilers = find_compilers()
|
||||
compilers_dict = []
|
||||
for compiler in compilers:
|
||||
compilers_dict.append(_to_dict(compiler))
|
||||
spack.config.set("compilers", compilers_dict, scope=scope)
|
||||
config = spack.config.get("compilers", scope=scope) or []
|
||||
if config or not init_config:
|
||||
return config
|
||||
|
||||
merged_config = spack.config.get("compilers")
|
||||
if merged_config:
|
||||
return config
|
||||
|
||||
_init_compiler_config(scope=scope)
|
||||
config = spack.config.get("compilers", scope=scope)
|
||||
# Update the configuration if there are currently no compilers
|
||||
# configured. Avoid updating automatically if there ARE site
|
||||
# compilers configured but no user ones.
|
||||
if not config and init_config:
|
||||
if scope is None:
|
||||
# We know no compilers were configured in any scope.
|
||||
init_compiler_config()
|
||||
config = spack.config.get("compilers", scope=scope)
|
||||
elif scope == "user":
|
||||
# Check the site config and update the user config if
|
||||
# nothing is configured at the site level.
|
||||
site_config = spack.config.get("compilers", scope="site")
|
||||
sys_config = spack.config.get("compilers", scope="system")
|
||||
if not site_config and not sys_config:
|
||||
init_compiler_config()
|
||||
config = spack.config.get("compilers", scope=scope)
|
||||
return config
|
||||
elif config:
|
||||
return config
|
||||
else:
|
||||
return [] # Return empty list which we will later append to.
|
||||
return config
|
||||
|
||||
|
||||
def _init_compiler_config(*, scope):
|
||||
"""Compiler search used when Spack has no compilers."""
|
||||
compilers = find_compilers()
|
||||
compilers_dict = []
|
||||
for compiler in compilers:
|
||||
compilers_dict.append(_to_dict(compiler))
|
||||
spack.config.set("compilers", compilers_dict, scope=scope)
|
||||
|
||||
|
||||
def compiler_config_files():
|
||||
@@ -165,52 +154,65 @@ def add_compilers_to_config(compilers, scope=None, init_config=True):
|
||||
compiler_config = get_compiler_config(scope, init_config)
|
||||
for compiler in compilers:
|
||||
compiler_config.append(_to_dict(compiler))
|
||||
global _cache_config_file
|
||||
_cache_config_file = compiler_config
|
||||
spack.config.set("compilers", compiler_config, scope=scope)
|
||||
|
||||
|
||||
@_auto_compiler_spec
|
||||
def remove_compiler_from_config(compiler_spec, scope=None):
|
||||
"""Remove compilers from the config, by spec.
|
||||
"""Remove compilers from configuration by spec.
|
||||
|
||||
If scope is None, all the scopes are searched for removal.
|
||||
|
||||
Arguments:
|
||||
compiler_specs: a list of CompilerSpec objects.
|
||||
scope: configuration scope to modify.
|
||||
compiler_spec: compiler to be removed
|
||||
scope: configuration scope to modify
|
||||
"""
|
||||
# Need a better way for this
|
||||
global _cache_config_file
|
||||
candidate_scopes = [scope]
|
||||
if scope is None:
|
||||
candidate_scopes = spack.config.config.scopes.keys()
|
||||
|
||||
removal_happened = False
|
||||
for current_scope in candidate_scopes:
|
||||
removal_happened |= _remove_compiler_from_scope(compiler_spec, scope=current_scope)
|
||||
|
||||
return removal_happened
|
||||
|
||||
|
||||
def _remove_compiler_from_scope(compiler_spec, scope):
|
||||
"""Removes a compiler from a specific configuration scope.
|
||||
|
||||
Args:
|
||||
compiler_spec: compiler to be removed
|
||||
scope: configuration scope under consideration
|
||||
|
||||
Returns:
|
||||
True if one or more compiler entries were actually removed, False otherwise
|
||||
"""
|
||||
assert scope is not None, "a specific scope is needed when calling this function"
|
||||
compiler_config = get_compiler_config(scope)
|
||||
config_length = len(compiler_config)
|
||||
|
||||
filtered_compiler_config = [
|
||||
comp
|
||||
for comp in compiler_config
|
||||
compiler_entry
|
||||
for compiler_entry in compiler_config
|
||||
if not spack.spec.parse_with_version_concrete(
|
||||
comp["compiler"]["spec"], compiler=True
|
||||
compiler_entry["compiler"]["spec"], compiler=True
|
||||
).satisfies(compiler_spec)
|
||||
]
|
||||
|
||||
# Update the cache for changes
|
||||
_cache_config_file = filtered_compiler_config
|
||||
if len(filtered_compiler_config) == config_length: # No items removed
|
||||
CompilerSpecInsufficientlySpecificError(compiler_spec)
|
||||
spack.config.set("compilers", filtered_compiler_config, scope=scope)
|
||||
if len(filtered_compiler_config) == len(compiler_config):
|
||||
return False
|
||||
|
||||
# We need to preserve the YAML type for comments, hence we are copying the
|
||||
# items in the list that has just been retrieved
|
||||
compiler_config[:] = filtered_compiler_config
|
||||
spack.config.set("compilers", compiler_config, scope=scope)
|
||||
return True
|
||||
|
||||
|
||||
def all_compilers_config(scope=None, init_config=True):
|
||||
"""Return a set of specs for all the compiler versions currently
|
||||
available to build with. These are instances of CompilerSpec.
|
||||
"""
|
||||
# Get compilers for this architecture.
|
||||
# Create a cache of the config file so we don't load all the time.
|
||||
global _cache_config_file
|
||||
if not _cache_config_file:
|
||||
_cache_config_file = get_compiler_config(scope, init_config)
|
||||
return _cache_config_file
|
||||
else:
|
||||
return _cache_config_file
|
||||
return get_compiler_config(scope, init_config)
|
||||
|
||||
|
||||
def all_compiler_specs(scope=None, init_config=True):
|
||||
|
||||
@@ -151,7 +151,11 @@ def setup_custom_environment(self, pkg, env):
|
||||
arch = arch.replace("-", "_")
|
||||
# vcvars can target specific sdk versions, force it to pick up concretized sdk
|
||||
# version, if needed by spec
|
||||
sdk_ver = "" if "win-sdk" not in pkg.spec else pkg.spec["win-sdk"].version.string + ".0"
|
||||
sdk_ver = (
|
||||
""
|
||||
if "win-sdk" not in pkg.spec or pkg.name == "win-sdk"
|
||||
else pkg.spec["win-sdk"].version.string + ".0"
|
||||
)
|
||||
# provide vcvars with msvc version selected by concretization,
|
||||
# not whatever it happens to pick up on the system (highest available version)
|
||||
out = subprocess.check_output( # novermin
|
||||
|
||||
@@ -81,7 +81,7 @@
|
||||
# Same as above, but including keys for environments
|
||||
# this allows us to unify config reading between configs and environments
|
||||
all_schemas = copy.deepcopy(section_schemas)
|
||||
all_schemas.update(dict((key, spack.schema.env.schema) for key in spack.schema.env.keys))
|
||||
all_schemas.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})
|
||||
|
||||
#: Path to the default configuration
|
||||
configuration_defaults_path = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))
|
||||
@@ -111,14 +111,6 @@
|
||||
overrides_base_name = "overrides-"
|
||||
|
||||
|
||||
def first_existing(dictionary, keys):
|
||||
"""Get the value of the first key in keys that is in the dictionary."""
|
||||
try:
|
||||
return next(k for k in keys if k in dictionary)
|
||||
except StopIteration:
|
||||
raise KeyError("None of %s is in dict!" % str(keys))
|
||||
|
||||
|
||||
class ConfigScope(object):
|
||||
"""This class represents a configuration scope.
|
||||
|
||||
@@ -838,12 +830,10 @@ def _config():
|
||||
|
||||
def add_from_file(filename, scope=None):
|
||||
"""Add updates to a config from a filename"""
|
||||
import spack.environment as ev
|
||||
|
||||
# Get file as config dict
|
||||
# Extract internal attributes, if we are dealing with an environment
|
||||
data = read_config_file(filename)
|
||||
if any(k in data for k in spack.schema.env.keys):
|
||||
data = ev.config_dict(data)
|
||||
if spack.schema.env.TOP_LEVEL_KEY in data:
|
||||
data = data[spack.schema.env.TOP_LEVEL_KEY]
|
||||
|
||||
# update all sections from config dict
|
||||
# We have to iterate on keys to keep overrides from the file
|
||||
@@ -1353,17 +1343,11 @@ def use_configuration(*scopes_or_paths):
|
||||
configuration = _config_from(scopes_or_paths)
|
||||
config.clear_caches(), configuration.clear_caches()
|
||||
|
||||
# Save and clear the current compiler cache
|
||||
saved_compiler_cache = spack.compilers._cache_config_file
|
||||
spack.compilers._cache_config_file = []
|
||||
|
||||
saved_config, config = config, configuration
|
||||
|
||||
try:
|
||||
yield configuration
|
||||
finally:
|
||||
# Restore previous config files
|
||||
spack.compilers._cache_config_file = saved_compiler_cache
|
||||
config = saved_config
|
||||
|
||||
|
||||
|
||||
@@ -37,7 +37,7 @@ def validate(configuration_file):
|
||||
config = syaml.load(f)
|
||||
|
||||
# Ensure we have a "container" attribute with sensible defaults set
|
||||
env_dict = ev.config_dict(config)
|
||||
env_dict = config[ev.TOP_LEVEL_KEY]
|
||||
env_dict.setdefault(
|
||||
"container", {"format": "docker", "images": {"os": "ubuntu:22.04", "spack": "develop"}}
|
||||
)
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
"template": "container/fedora_38.dockerfile",
|
||||
"image": "docker.io/fedora:38"
|
||||
},
|
||||
"os_package_manager": "yum",
|
||||
"os_package_manager": "dnf",
|
||||
"build": "spack/fedora38",
|
||||
"build_tags": {
|
||||
"develop": "latest"
|
||||
@@ -31,7 +31,7 @@
|
||||
"template": "container/fedora_37.dockerfile",
|
||||
"image": "docker.io/fedora:37"
|
||||
},
|
||||
"os_package_manager": "yum",
|
||||
"os_package_manager": "dnf",
|
||||
"build": "spack/fedora37",
|
||||
"build_tags": {
|
||||
"develop": "latest"
|
||||
@@ -45,7 +45,7 @@
|
||||
"template": "container/rockylinux_9.dockerfile",
|
||||
"image": "docker.io/rockylinux:9"
|
||||
},
|
||||
"os_package_manager": "yum",
|
||||
"os_package_manager": "dnf_epel",
|
||||
"build": "spack/rockylinux9",
|
||||
"build_tags": {
|
||||
"develop": "latest"
|
||||
@@ -59,7 +59,7 @@
|
||||
"template": "container/rockylinux_8.dockerfile",
|
||||
"image": "docker.io/rockylinux:8"
|
||||
},
|
||||
"os_package_manager": "yum",
|
||||
"os_package_manager": "dnf_epel",
|
||||
"build": "spack/rockylinux8",
|
||||
"build_tags": {
|
||||
"develop": "latest"
|
||||
@@ -73,7 +73,7 @@
|
||||
"template": "container/almalinux_9.dockerfile",
|
||||
"image": "quay.io/almalinux/almalinux:9"
|
||||
},
|
||||
"os_package_manager": "yum",
|
||||
"os_package_manager": "dnf_epel",
|
||||
"build": "spack/almalinux9",
|
||||
"build_tags": {
|
||||
"develop": "latest"
|
||||
@@ -87,7 +87,7 @@
|
||||
"template": "container/almalinux_8.dockerfile",
|
||||
"image": "quay.io/almalinux/almalinux:8"
|
||||
},
|
||||
"os_package_manager": "yum",
|
||||
"os_package_manager": "dnf_epel",
|
||||
"build": "spack/almalinux8",
|
||||
"build_tags": {
|
||||
"develop": "latest"
|
||||
@@ -101,7 +101,7 @@
|
||||
"template": "container/centos_stream.dockerfile",
|
||||
"image": "quay.io/centos/centos:stream"
|
||||
},
|
||||
"os_package_manager": "yum",
|
||||
"os_package_manager": "dnf_epel",
|
||||
"build": "spack/centos-stream",
|
||||
"final": {
|
||||
"image": "quay.io/centos/centos:stream"
|
||||
@@ -185,6 +185,16 @@
|
||||
"install": "apt-get -yqq install",
|
||||
"clean": "rm -rf /var/lib/apt/lists/*"
|
||||
},
|
||||
"dnf": {
|
||||
"update": "dnf update -y",
|
||||
"install": "dnf install -y",
|
||||
"clean": "rm -rf /var/cache/dnf && dnf clean all"
|
||||
},
|
||||
"dnf_epel": {
|
||||
"update": "dnf update -y && dnf install -y epel-release && dnf update -y",
|
||||
"install": "dnf install -y",
|
||||
"clean": "rm -rf /var/cache/dnf && dnf clean all"
|
||||
},
|
||||
"yum": {
|
||||
"update": "yum update -y && yum install -y epel-release && yum update -y",
|
||||
"install": "yum install -y",
|
||||
|
||||
@@ -50,7 +50,7 @@ def create(configuration, last_phase=None):
|
||||
configuration (dict): how to generate the current recipe
|
||||
last_phase (str): last phase to be printed or None to print them all
|
||||
"""
|
||||
name = ev.config_dict(configuration)["container"]["format"]
|
||||
name = configuration[ev.TOP_LEVEL_KEY]["container"]["format"]
|
||||
return _writer_factory[name](configuration, last_phase)
|
||||
|
||||
|
||||
@@ -138,7 +138,7 @@ class PathContext(tengine.Context):
|
||||
template_name: Optional[str] = None
|
||||
|
||||
def __init__(self, config, last_phase):
|
||||
self.config = ev.config_dict(config)
|
||||
self.config = config[ev.TOP_LEVEL_KEY]
|
||||
self.container_config = self.config["container"]
|
||||
|
||||
# Operating system tag as written in the configuration file
|
||||
|
||||
@@ -48,7 +48,8 @@ def translated_compiler_name(manifest_compiler_name):
|
||||
def compiler_from_entry(entry):
|
||||
compiler_name = translated_compiler_name(entry["name"])
|
||||
paths = entry["executables"]
|
||||
version = entry["version"]
|
||||
# to instantiate a compiler class we may need a concrete version:
|
||||
version = "={}".format(entry["version"])
|
||||
arch = entry["arch"]
|
||||
operating_system = arch["os"]
|
||||
target = arch["target"]
|
||||
@@ -163,7 +164,10 @@ def entries_to_specs(entries):
|
||||
continue
|
||||
parent_spec = spec_dict[entry["hash"]]
|
||||
dep_spec = spec_dict[dep_hash]
|
||||
parent_spec._add_dependency(dep_spec, deptypes=deptypes)
|
||||
parent_spec._add_dependency(dep_spec, deptypes=deptypes, virtuals=())
|
||||
|
||||
for spec in spec_dict.values():
|
||||
spack.spec.reconstruct_virtuals_on_edges(spec)
|
||||
|
||||
return spec_dict
|
||||
|
||||
|
||||
@@ -60,7 +60,7 @@
|
||||
# DB version. This is stuck in the DB file to track changes in format.
|
||||
# Increment by one when the database format changes.
|
||||
# Versions before 5 were not integers.
|
||||
_db_version = vn.Version("6")
|
||||
_db_version = vn.Version("7")
|
||||
|
||||
# For any version combinations here, skip reindex when upgrading.
|
||||
# Reindexing can take considerable time and is not always necessary.
|
||||
@@ -72,6 +72,7 @@
|
||||
# version is saved to disk the first time the DB is written.
|
||||
(vn.Version("0.9.3"), vn.Version("5")),
|
||||
(vn.Version("5"), vn.Version("6")),
|
||||
(vn.Version("6"), vn.Version("7")),
|
||||
]
|
||||
|
||||
# Default timeout for spack database locks in seconds or None (no timeout).
|
||||
@@ -105,7 +106,11 @@
|
||||
|
||||
|
||||
def reader(version):
|
||||
reader_cls = {vn.Version("5"): spack.spec.SpecfileV1, vn.Version("6"): spack.spec.SpecfileV3}
|
||||
reader_cls = {
|
||||
vn.Version("5"): spack.spec.SpecfileV1,
|
||||
vn.Version("6"): spack.spec.SpecfileV3,
|
||||
vn.Version("7"): spack.spec.SpecfileV4,
|
||||
}
|
||||
return reader_cls[version]
|
||||
|
||||
|
||||
@@ -743,7 +748,9 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
|
||||
spec_node_dict = spec_node_dict[spec.name]
|
||||
if "dependencies" in spec_node_dict:
|
||||
yaml_deps = spec_node_dict["dependencies"]
|
||||
for dname, dhash, dtypes, _ in spec_reader.read_specfile_dep_specs(yaml_deps):
|
||||
for dname, dhash, dtypes, _, virtuals in spec_reader.read_specfile_dep_specs(
|
||||
yaml_deps
|
||||
):
|
||||
# It is important that we always check upstream installations
|
||||
# in the same order, and that we always check the local
|
||||
# installation first: if a downstream Spack installs a package
|
||||
@@ -766,7 +773,7 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
|
||||
tty.warn(msg)
|
||||
continue
|
||||
|
||||
spec._add_dependency(child, deptypes=dtypes)
|
||||
spec._add_dependency(child, deptypes=dtypes, virtuals=virtuals)
|
||||
|
||||
def _read_from_file(self, filename):
|
||||
"""Fill database from file, do not maintain old data.
|
||||
@@ -1172,7 +1179,7 @@ def _add(
|
||||
for dep in spec.edges_to_dependencies(deptype=_tracked_deps):
|
||||
dkey = dep.spec.dag_hash()
|
||||
upstream, record = self.query_by_spec_hash(dkey)
|
||||
new_spec._add_dependency(record.spec, deptypes=dep.deptypes)
|
||||
new_spec._add_dependency(record.spec, deptypes=dep.deptypes, virtuals=dep.virtuals)
|
||||
if not upstream:
|
||||
record.ref_count += 1
|
||||
|
||||
|
||||
@@ -337,6 +337,7 @@
|
||||
"""
|
||||
|
||||
from .environment import (
|
||||
TOP_LEVEL_KEY,
|
||||
Environment,
|
||||
SpackEnvironmentError,
|
||||
SpackEnvironmentViewError,
|
||||
@@ -345,7 +346,6 @@
|
||||
active_environment,
|
||||
all_environment_names,
|
||||
all_environments,
|
||||
config_dict,
|
||||
create,
|
||||
create_in_dir,
|
||||
deactivate,
|
||||
@@ -369,6 +369,7 @@
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"TOP_LEVEL_KEY",
|
||||
"Environment",
|
||||
"SpackEnvironmentError",
|
||||
"SpackEnvironmentViewError",
|
||||
@@ -377,7 +378,6 @@
|
||||
"active_environment",
|
||||
"all_environment_names",
|
||||
"all_environments",
|
||||
"config_dict",
|
||||
"create",
|
||||
"create_in_dir",
|
||||
"deactivate",
|
||||
|
||||
@@ -53,6 +53,7 @@
|
||||
import spack.version
|
||||
from spack.filesystem_view import SimpleFilesystemView, inverse_view_func_parser, view_func_parser
|
||||
from spack.installer import PackageInstaller
|
||||
from spack.schema.env import TOP_LEVEL_KEY
|
||||
from spack.spec import Spec
|
||||
from spack.spec_list import InvalidSpecConstraintError, SpecList
|
||||
from spack.util.path import substitute_path_variables
|
||||
@@ -124,7 +125,7 @@ def default_manifest_yaml():
|
||||
valid_environment_name_re = r"^\w[\w-]*$"
|
||||
|
||||
#: version of the lockfile format. Must increase monotonically.
|
||||
lockfile_format_version = 4
|
||||
lockfile_format_version = 5
|
||||
|
||||
|
||||
READER_CLS = {
|
||||
@@ -132,6 +133,7 @@ def default_manifest_yaml():
|
||||
2: spack.spec.SpecfileV1,
|
||||
3: spack.spec.SpecfileV2,
|
||||
4: spack.spec.SpecfileV3,
|
||||
5: spack.spec.SpecfileV4,
|
||||
}
|
||||
|
||||
|
||||
@@ -361,19 +363,6 @@ def ensure_env_root_path_exists():
|
||||
fs.mkdirp(env_root_path())
|
||||
|
||||
|
||||
def config_dict(yaml_data):
|
||||
"""Get the configuration scope section out of an spack.yaml"""
|
||||
# TODO (env:): Remove env: as a possible top level keyword in v0.21
|
||||
key = spack.config.first_existing(yaml_data, spack.schema.env.keys)
|
||||
if key == "env":
|
||||
msg = (
|
||||
"using 'env:' as a top-level attribute of a Spack environment is deprecated and "
|
||||
"will be removed in Spack v0.21. Please use 'spack:' instead."
|
||||
)
|
||||
warnings.warn(msg)
|
||||
return yaml_data[key]
|
||||
|
||||
|
||||
def all_environment_names():
|
||||
"""List the names of environments that currently exist."""
|
||||
# just return empty if the env path does not exist. A read-only
|
||||
@@ -821,8 +810,8 @@ def write_transaction(self):
|
||||
def _construct_state_from_manifest(self):
|
||||
"""Read manifest file and set up user specs."""
|
||||
self.spec_lists = collections.OrderedDict()
|
||||
|
||||
for item in config_dict(self.manifest).get("definitions", []):
|
||||
env_configuration = self.manifest[TOP_LEVEL_KEY]
|
||||
for item in env_configuration.get("definitions", []):
|
||||
entry = copy.deepcopy(item)
|
||||
when = _eval_conditional(entry.pop("when", "True"))
|
||||
assert len(entry) == 1
|
||||
@@ -834,13 +823,13 @@ def _construct_state_from_manifest(self):
|
||||
else:
|
||||
self.spec_lists[name] = user_specs
|
||||
|
||||
spec_list = config_dict(self.manifest).get(user_speclist_name, [])
|
||||
spec_list = env_configuration.get(user_speclist_name, [])
|
||||
user_specs = SpecList(
|
||||
user_speclist_name, [s for s in spec_list if s], self.spec_lists.copy()
|
||||
)
|
||||
self.spec_lists[user_speclist_name] = user_specs
|
||||
|
||||
enable_view = config_dict(self.manifest).get("view")
|
||||
enable_view = env_configuration.get("view")
|
||||
# enable_view can be boolean, string, or None
|
||||
if enable_view is True or enable_view is None:
|
||||
self.views = {default_view_name: ViewDescriptor(self.path, self.view_path_default)}
|
||||
@@ -855,14 +844,11 @@ def _construct_state_from_manifest(self):
|
||||
else:
|
||||
self.views = {}
|
||||
|
||||
# Retrieve the current concretization strategy
|
||||
configuration = config_dict(self.manifest)
|
||||
|
||||
# Retrieve unification scheme for the concretizer
|
||||
self.unify = spack.config.get("concretizer:unify", False)
|
||||
|
||||
# Retrieve dev-build packages:
|
||||
self.dev_specs = copy.deepcopy(configuration.get("develop", {}))
|
||||
self.dev_specs = copy.deepcopy(env_configuration.get("develop", {}))
|
||||
for name, entry in self.dev_specs.items():
|
||||
# spec must include a concrete version
|
||||
assert Spec(entry["spec"]).versions.concrete_range_as_version
|
||||
@@ -982,7 +968,7 @@ def included_config_scopes(self):
|
||||
|
||||
# load config scopes added via 'include:', in reverse so that
|
||||
# highest-precedence scopes are last.
|
||||
includes = config_dict(self.manifest).get("include", [])
|
||||
includes = self.manifest[TOP_LEVEL_KEY].get("include", [])
|
||||
missing = []
|
||||
for i, config_path in enumerate(reversed(includes)):
|
||||
# allow paths to contain spack config/environment variables, etc.
|
||||
@@ -1075,10 +1061,7 @@ def env_file_config_scope(self):
|
||||
"""Get the configuration scope for the environment's manifest file."""
|
||||
config_name = self.env_file_config_scope_name()
|
||||
return spack.config.SingleFileScope(
|
||||
config_name,
|
||||
self.manifest_path,
|
||||
spack.schema.env.schema,
|
||||
[spack.config.first_existing(self.manifest, spack.schema.env.keys)],
|
||||
config_name, self.manifest_path, spack.schema.env.schema, [TOP_LEVEL_KEY]
|
||||
)
|
||||
|
||||
def config_scopes(self):
|
||||
@@ -1221,28 +1204,27 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
|
||||
old_specs = set(self.user_specs)
|
||||
new_specs = set()
|
||||
for spec in matches:
|
||||
if spec in list_to_change:
|
||||
try:
|
||||
list_to_change.remove(spec)
|
||||
self.update_stale_references(list_name)
|
||||
new_specs = set(self.user_specs)
|
||||
except spack.spec_list.SpecListError:
|
||||
# define new specs list
|
||||
new_specs = set(self.user_specs)
|
||||
msg = f"Spec '{spec}' is part of a spec matrix and "
|
||||
msg += f"cannot be removed from list '{list_to_change}'."
|
||||
if force:
|
||||
msg += " It will be removed from the concrete specs."
|
||||
# Mock new specs, so we can remove this spec from concrete spec lists
|
||||
new_specs.remove(spec)
|
||||
tty.warn(msg)
|
||||
if spec not in list_to_change:
|
||||
continue
|
||||
try:
|
||||
list_to_change.remove(spec)
|
||||
self.update_stale_references(list_name)
|
||||
new_specs = set(self.user_specs)
|
||||
except spack.spec_list.SpecListError:
|
||||
# define new specs list
|
||||
new_specs = set(self.user_specs)
|
||||
msg = f"Spec '{spec}' is part of a spec matrix and "
|
||||
msg += f"cannot be removed from list '{list_to_change}'."
|
||||
if force:
|
||||
msg += " It will be removed from the concrete specs."
|
||||
# Mock new specs, so we can remove this spec from concrete spec lists
|
||||
new_specs.remove(spec)
|
||||
tty.warn(msg)
|
||||
else:
|
||||
if list_name == user_speclist_name:
|
||||
self.manifest.remove_user_spec(str(spec))
|
||||
else:
|
||||
if list_name == user_speclist_name:
|
||||
for user_spec in matches:
|
||||
self.manifest.remove_user_spec(str(user_spec))
|
||||
else:
|
||||
for user_spec in matches:
|
||||
self.manifest.remove_definition(str(user_spec), list_name=list_name)
|
||||
self.manifest.remove_definition(str(spec), list_name=list_name)
|
||||
|
||||
# If force, update stale concretized specs
|
||||
for spec in old_specs - new_specs:
|
||||
@@ -1352,6 +1334,10 @@ def concretize(self, force=False, tests=False):
|
||||
self.concretized_order = []
|
||||
self.specs_by_hash = {}
|
||||
|
||||
# Remove concrete specs that no longer correlate to a user spec
|
||||
for spec in set(self.concretized_user_specs) - set(self.user_specs):
|
||||
self.deconcretize(spec)
|
||||
|
||||
# Pick the right concretization strategy
|
||||
if self.unify == "when_possible":
|
||||
return self._concretize_together_where_possible(tests=tests)
|
||||
@@ -1365,6 +1351,16 @@ def concretize(self, force=False, tests=False):
|
||||
msg = "concretization strategy not implemented [{0}]"
|
||||
raise SpackEnvironmentError(msg.format(self.unify))
|
||||
|
||||
def deconcretize(self, spec):
|
||||
# spec has to be a root of the environment
|
||||
index = self.concretized_user_specs.index(spec)
|
||||
dag_hash = self.concretized_order.pop(index)
|
||||
del self.concretized_user_specs[index]
|
||||
|
||||
# If this was the only user spec that concretized to this concrete spec, remove it
|
||||
if dag_hash not in self.concretized_order:
|
||||
del self.specs_by_hash[dag_hash]
|
||||
|
||||
def _get_specs_to_concretize(
|
||||
self,
|
||||
) -> Tuple[Set[spack.spec.Spec], Set[spack.spec.Spec], List[spack.spec.Spec]]:
|
||||
@@ -1553,12 +1549,13 @@ def _concretize_separately(self, tests=False):
|
||||
for h in self.specs_by_hash:
|
||||
current_spec, computed_spec = self.specs_by_hash[h], by_hash[h]
|
||||
for node in computed_spec.traverse():
|
||||
test_deps = node.dependencies(deptype="test")
|
||||
for test_dependency in test_deps:
|
||||
test_edges = node.edges_to_dependencies(deptype="test")
|
||||
for current_edge in test_edges:
|
||||
test_dependency = current_edge.spec
|
||||
if test_dependency in current_spec[node.name]:
|
||||
continue
|
||||
current_spec[node.name].add_dependency_edge(
|
||||
test_dependency.copy(), deptypes="test"
|
||||
test_dependency.copy(), deptypes="test", virtuals=current_edge.virtuals
|
||||
)
|
||||
|
||||
results = [
|
||||
@@ -2189,9 +2186,9 @@ def _read_lockfile_dict(self, d):
|
||||
# and add them to the spec
|
||||
for lockfile_key, node_dict in json_specs_by_hash.items():
|
||||
name, data = reader.name_and_data(node_dict)
|
||||
for _, dep_hash, deptypes, _ in reader.dependencies_from_node_dict(data):
|
||||
for _, dep_hash, deptypes, _, virtuals in reader.dependencies_from_node_dict(data):
|
||||
specs_by_hash[lockfile_key]._add_dependency(
|
||||
specs_by_hash[dep_hash], deptypes=deptypes
|
||||
specs_by_hash[dep_hash], deptypes=deptypes, virtuals=virtuals
|
||||
)
|
||||
|
||||
# Traverse the root specs one at a time in the order they appear.
|
||||
@@ -2383,17 +2380,28 @@ def _concretize_from_constraints(spec_constraints, tests=False):
|
||||
# Accept only valid constraints from list and concretize spec
|
||||
# Get the named spec even if out of order
|
||||
root_spec = [s for s in spec_constraints if s.name]
|
||||
if len(root_spec) != 1:
|
||||
m = "The constraints %s are not a valid spec " % spec_constraints
|
||||
m += "concretization target. all specs must have a single name "
|
||||
m += "constraint for concretization."
|
||||
raise InvalidSpecConstraintError(m)
|
||||
spec_constraints.remove(root_spec[0])
|
||||
hash_spec = [s for s in spec_constraints if s.abstract_hash]
|
||||
|
||||
error_message = "The constraints %s are not a valid spec " % spec_constraints
|
||||
error_message += "concretization target. all specs must have a single name "
|
||||
error_message += "constraint for concretization."
|
||||
|
||||
if len(root_spec) > 1:
|
||||
raise InvalidSpecConstraintError(error_message)
|
||||
|
||||
if len(root_spec) < 1:
|
||||
if len(hash_spec) < 1:
|
||||
raise InvalidSpecConstraintError(error_message)
|
||||
|
||||
if root_spec:
|
||||
spec_constraints.remove(root_spec[0])
|
||||
|
||||
root_spec = root_spec[0] if root_spec else Spec()
|
||||
|
||||
invalid_constraints = []
|
||||
while True:
|
||||
# Attach all anonymous constraints to one named spec
|
||||
s = root_spec[0].copy()
|
||||
s = root_spec.copy()
|
||||
for c in spec_constraints:
|
||||
if c not in invalid_constraints:
|
||||
s.constrain(c)
|
||||
@@ -2671,8 +2679,8 @@ def add_user_spec(self, user_spec: str) -> None:
|
||||
Args:
|
||||
user_spec: user spec to be appended
|
||||
"""
|
||||
config_dict(self.pristine_yaml_content).setdefault("specs", []).append(user_spec)
|
||||
config_dict(self.yaml_content).setdefault("specs", []).append(user_spec)
|
||||
self.pristine_configuration.setdefault("specs", []).append(user_spec)
|
||||
self.configuration.setdefault("specs", []).append(user_spec)
|
||||
self.changed = True
|
||||
|
||||
def remove_user_spec(self, user_spec: str) -> None:
|
||||
@@ -2685,8 +2693,8 @@ def remove_user_spec(self, user_spec: str) -> None:
|
||||
SpackEnvironmentError: when the user spec is not in the list
|
||||
"""
|
||||
try:
|
||||
config_dict(self.pristine_yaml_content)["specs"].remove(user_spec)
|
||||
config_dict(self.yaml_content)["specs"].remove(user_spec)
|
||||
self.pristine_configuration["specs"].remove(user_spec)
|
||||
self.configuration["specs"].remove(user_spec)
|
||||
except ValueError as e:
|
||||
msg = f"cannot remove {user_spec} from {self}, no such spec exists"
|
||||
raise SpackEnvironmentError(msg) from e
|
||||
@@ -2703,8 +2711,8 @@ def override_user_spec(self, user_spec: str, idx: int) -> None:
|
||||
SpackEnvironmentError: when the user spec cannot be overridden
|
||||
"""
|
||||
try:
|
||||
config_dict(self.pristine_yaml_content)["specs"][idx] = user_spec
|
||||
config_dict(self.yaml_content)["specs"][idx] = user_spec
|
||||
self.pristine_configuration["specs"][idx] = user_spec
|
||||
self.configuration["specs"][idx] = user_spec
|
||||
except ValueError as e:
|
||||
msg = f"cannot override {user_spec} from {self}"
|
||||
raise SpackEnvironmentError(msg) from e
|
||||
@@ -2720,14 +2728,14 @@ def add_definition(self, user_spec: str, list_name: str) -> None:
|
||||
Raises:
|
||||
SpackEnvironmentError: is no valid definition exists already
|
||||
"""
|
||||
defs = config_dict(self.pristine_yaml_content).get("definitions", [])
|
||||
defs = self.pristine_configuration.get("definitions", [])
|
||||
msg = f"cannot add {user_spec} to the '{list_name}' definition, no valid list exists"
|
||||
|
||||
for idx, item in self._iterate_on_definitions(defs, list_name=list_name, err_msg=msg):
|
||||
item[list_name].append(user_spec)
|
||||
break
|
||||
|
||||
config_dict(self.yaml_content)["definitions"][idx][list_name].append(user_spec)
|
||||
self.configuration["definitions"][idx][list_name].append(user_spec)
|
||||
self.changed = True
|
||||
|
||||
def remove_definition(self, user_spec: str, list_name: str) -> None:
|
||||
@@ -2741,7 +2749,7 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
|
||||
SpackEnvironmentError: if the user spec cannot be removed from the list,
|
||||
or the list does not exist
|
||||
"""
|
||||
defs = config_dict(self.pristine_yaml_content).get("definitions", [])
|
||||
defs = self.pristine_configuration.get("definitions", [])
|
||||
msg = (
|
||||
f"cannot remove {user_spec} from the '{list_name}' definition, "
|
||||
f"no valid list exists"
|
||||
@@ -2754,7 +2762,7 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
config_dict(self.yaml_content)["definitions"][idx][list_name].remove(user_spec)
|
||||
self.configuration["definitions"][idx][list_name].remove(user_spec)
|
||||
self.changed = True
|
||||
|
||||
def override_definition(self, user_spec: str, *, override: str, list_name: str) -> None:
|
||||
@@ -2769,7 +2777,7 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
|
||||
Raises:
|
||||
SpackEnvironmentError: if the user spec cannot be overridden
|
||||
"""
|
||||
defs = config_dict(self.pristine_yaml_content).get("definitions", [])
|
||||
defs = self.pristine_configuration.get("definitions", [])
|
||||
msg = f"cannot override {user_spec} with {override} in the '{list_name}' definition"
|
||||
|
||||
for idx, item in self._iterate_on_definitions(defs, list_name=list_name, err_msg=msg):
|
||||
@@ -2780,7 +2788,7 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
config_dict(self.yaml_content)["definitions"][idx][list_name][sub_index] = override
|
||||
self.configuration["definitions"][idx][list_name][sub_index] = override
|
||||
self.changed = True
|
||||
|
||||
def _iterate_on_definitions(self, definitions, *, list_name, err_msg):
|
||||
@@ -2812,24 +2820,24 @@ def set_default_view(self, view: Union[bool, str, pathlib.Path, Dict[str, str]])
|
||||
True the default view is used for the environment, if False there's no view.
|
||||
"""
|
||||
if isinstance(view, dict):
|
||||
config_dict(self.pristine_yaml_content)["view"][default_view_name].update(view)
|
||||
config_dict(self.yaml_content)["view"][default_view_name].update(view)
|
||||
self.pristine_configuration["view"][default_view_name].update(view)
|
||||
self.configuration["view"][default_view_name].update(view)
|
||||
self.changed = True
|
||||
return
|
||||
|
||||
if not isinstance(view, bool):
|
||||
view = str(view)
|
||||
|
||||
config_dict(self.pristine_yaml_content)["view"] = view
|
||||
config_dict(self.yaml_content)["view"] = view
|
||||
self.pristine_configuration["view"] = view
|
||||
self.configuration["view"] = view
|
||||
self.changed = True
|
||||
|
||||
def remove_default_view(self) -> None:
|
||||
"""Removes the default view from the manifest file"""
|
||||
view_data = config_dict(self.pristine_yaml_content).get("view")
|
||||
view_data = self.pristine_configuration.get("view")
|
||||
if isinstance(view_data, collections.abc.Mapping):
|
||||
config_dict(self.pristine_yaml_content)["view"].pop(default_view_name)
|
||||
config_dict(self.yaml_content)["view"].pop(default_view_name)
|
||||
self.pristine_configuration["view"].pop(default_view_name)
|
||||
self.configuration["view"].pop(default_view_name)
|
||||
self.changed = True
|
||||
return
|
||||
|
||||
@@ -2846,12 +2854,10 @@ def add_develop_spec(self, pkg_name: str, entry: Dict[str, str]) -> None:
|
||||
if entry["path"] == pkg_name:
|
||||
entry.pop("path")
|
||||
|
||||
config_dict(self.pristine_yaml_content).setdefault("develop", {}).setdefault(
|
||||
pkg_name, {}
|
||||
).update(entry)
|
||||
config_dict(self.yaml_content).setdefault("develop", {}).setdefault(pkg_name, {}).update(
|
||||
self.pristine_configuration.setdefault("develop", {}).setdefault(pkg_name, {}).update(
|
||||
entry
|
||||
)
|
||||
self.configuration.setdefault("develop", {}).setdefault(pkg_name, {}).update(entry)
|
||||
self.changed = True
|
||||
|
||||
def remove_develop_spec(self, pkg_name: str) -> None:
|
||||
@@ -2864,11 +2870,11 @@ def remove_develop_spec(self, pkg_name: str) -> None:
|
||||
SpackEnvironmentError: if there is nothing to remove
|
||||
"""
|
||||
try:
|
||||
del config_dict(self.pristine_yaml_content)["develop"][pkg_name]
|
||||
del self.pristine_configuration["develop"][pkg_name]
|
||||
except KeyError as e:
|
||||
msg = f"cannot remove '{pkg_name}' from develop specs in {self}, entry does not exist"
|
||||
raise SpackEnvironmentError(msg) from e
|
||||
del config_dict(self.yaml_content)["develop"][pkg_name]
|
||||
del self.configuration["develop"][pkg_name]
|
||||
self.changed = True
|
||||
|
||||
def absolutify_dev_paths(self, init_file_dir: Union[str, pathlib.Path]) -> None:
|
||||
@@ -2879,11 +2885,11 @@ def absolutify_dev_paths(self, init_file_dir: Union[str, pathlib.Path]) -> None:
|
||||
init_file_dir: directory with the "spack.yaml" used to initialize the environment.
|
||||
"""
|
||||
init_file_dir = pathlib.Path(init_file_dir).absolute()
|
||||
for _, entry in config_dict(self.pristine_yaml_content).get("develop", {}).items():
|
||||
for _, entry in self.pristine_configuration.get("develop", {}).items():
|
||||
expanded_path = os.path.normpath(str(init_file_dir / entry["path"]))
|
||||
entry["path"] = str(expanded_path)
|
||||
|
||||
for _, entry in config_dict(self.yaml_content).get("develop", {}).items():
|
||||
for _, entry in self.configuration.get("develop", {}).items():
|
||||
expanded_path = os.path.normpath(str(init_file_dir / entry["path"]))
|
||||
entry["path"] = str(expanded_path)
|
||||
self.changed = True
|
||||
@@ -2897,6 +2903,16 @@ def flush(self) -> None:
|
||||
_write_yaml(self.pristine_yaml_content, f)
|
||||
self.changed = False
|
||||
|
||||
@property
|
||||
def pristine_configuration(self):
|
||||
"""Return the dictionaries in the pristine YAML, without the top level attribute"""
|
||||
return self.pristine_yaml_content[TOP_LEVEL_KEY]
|
||||
|
||||
@property
|
||||
def configuration(self):
|
||||
"""Return the dictionaries in the YAML, without the top level attribute"""
|
||||
return self.yaml_content[TOP_LEVEL_KEY]
|
||||
|
||||
def __len__(self):
|
||||
return len(self.yaml_content)
|
||||
|
||||
|
||||
@@ -544,6 +544,7 @@ def _static_edges(specs, deptype):
|
||||
spack.spec.Spec(parent_name),
|
||||
spack.spec.Spec(dependency_name),
|
||||
deptypes=deptype,
|
||||
virtuals=(),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -215,6 +215,31 @@ def print_message(logger: LogType, msg: str, verbose: bool = False):
|
||||
tty.info(msg, format="g")
|
||||
|
||||
|
||||
def overall_status(current_status: "TestStatus", substatuses: List["TestStatus"]) -> "TestStatus":
|
||||
"""Determine the overall status based on the current and associated sub status values.
|
||||
|
||||
Args:
|
||||
current_status: current overall status, assumed to default to PASSED
|
||||
substatuses: status of each test part or overall status of each test spec
|
||||
Returns:
|
||||
test status encompassing the main test and all subtests
|
||||
"""
|
||||
if current_status in [TestStatus.SKIPPED, TestStatus.NO_TESTS, TestStatus.FAILED]:
|
||||
return current_status
|
||||
|
||||
skipped = 0
|
||||
for status in substatuses:
|
||||
if status == TestStatus.FAILED:
|
||||
return status
|
||||
elif status == TestStatus.SKIPPED:
|
||||
skipped += 1
|
||||
|
||||
if skipped and skipped == len(substatuses):
|
||||
return TestStatus.SKIPPED
|
||||
|
||||
return current_status
|
||||
|
||||
|
||||
class PackageTest:
|
||||
"""The class that manages stand-alone (post-install) package tests."""
|
||||
|
||||
@@ -308,14 +333,12 @@ def status(self, name: str, status: "TestStatus", msg: Optional[str] = None):
|
||||
# to start with the same name) may not have PASSED. This extra
|
||||
# check is used to ensure the containing test part is not claiming
|
||||
# to have passed when at least one subpart failed.
|
||||
if status == TestStatus.PASSED:
|
||||
for pname, substatus in self.test_parts.items():
|
||||
if pname != part_name and pname.startswith(part_name):
|
||||
if substatus == TestStatus.FAILED:
|
||||
print(f"{substatus}: {part_name}{extra}")
|
||||
self.test_parts[part_name] = substatus
|
||||
self.counts[substatus] += 1
|
||||
return
|
||||
substatuses = []
|
||||
for pname, substatus in self.test_parts.items():
|
||||
if pname != part_name and pname.startswith(part_name):
|
||||
substatuses.append(substatus)
|
||||
if substatuses:
|
||||
status = overall_status(status, substatuses)
|
||||
|
||||
print(f"{status}: {part_name}{extra}")
|
||||
self.test_parts[part_name] = status
|
||||
@@ -420,6 +443,26 @@ def summarize(self):
|
||||
lines.append(f"{totals:=^80}")
|
||||
return lines
|
||||
|
||||
def write_tested_status(self):
|
||||
"""Write the overall status to the tested file.
|
||||
|
||||
If there any test part failures, then the tests failed. If all test
|
||||
parts are skipped, then the tests were skipped. If any tests passed
|
||||
then the tests passed; otherwise, there were not tests executed.
|
||||
"""
|
||||
status = TestStatus.NO_TESTS
|
||||
if self.counts[TestStatus.FAILED] > 0:
|
||||
status = TestStatus.FAILED
|
||||
else:
|
||||
skipped = self.counts[TestStatus.SKIPPED]
|
||||
if skipped and self.parts() == skipped:
|
||||
status = TestStatus.SKIPPED
|
||||
elif self.counts[TestStatus.PASSED] > 0:
|
||||
status = TestStatus.PASSED
|
||||
|
||||
with open(self.tested_file, "w") as f:
|
||||
f.write(f"{status.value}\n")
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def test_part(pkg: Pb, test_name: str, purpose: str, work_dir: str = ".", verbose: bool = False):
|
||||
@@ -654,8 +697,9 @@ def process_test_parts(pkg: Pb, test_specs: List[spack.spec.Spec], verbose: bool
|
||||
try:
|
||||
tests = test_functions(spec.package_class)
|
||||
except spack.repo.UnknownPackageError:
|
||||
# some virtuals don't have a package
|
||||
tests = []
|
||||
# Some virtuals don't have a package so we don't want to report
|
||||
# them as not having tests when that isn't appropriate.
|
||||
continue
|
||||
|
||||
if len(tests) == 0:
|
||||
tester.status(spec.name, TestStatus.NO_TESTS)
|
||||
@@ -682,7 +726,7 @@ def process_test_parts(pkg: Pb, test_specs: List[spack.spec.Spec], verbose: bool
|
||||
|
||||
finally:
|
||||
if tester.ran_tests():
|
||||
fs.touch(tester.tested_file)
|
||||
tester.write_tested_status()
|
||||
|
||||
# log one more test message to provide a completion timestamp
|
||||
# for CDash reporting
|
||||
@@ -889,20 +933,15 @@ def __call__(self, *args, **kwargs):
|
||||
if remove_directory:
|
||||
shutil.rmtree(test_dir)
|
||||
|
||||
tested = os.path.exists(self.tested_file_for_spec(spec))
|
||||
if tested:
|
||||
status = TestStatus.PASSED
|
||||
else:
|
||||
self.ensure_stage()
|
||||
if spec.external and not externals:
|
||||
status = TestStatus.SKIPPED
|
||||
elif not spec.installed:
|
||||
status = TestStatus.SKIPPED
|
||||
else:
|
||||
status = TestStatus.NO_TESTS
|
||||
status = self.test_status(spec, externals)
|
||||
self.counts[status] += 1
|
||||
|
||||
self.write_test_result(spec, status)
|
||||
|
||||
except SkipTest:
|
||||
status = TestStatus.SKIPPED
|
||||
self.counts[status] += 1
|
||||
self.write_test_result(spec, TestStatus.SKIPPED)
|
||||
|
||||
except BaseException as exc:
|
||||
status = TestStatus.FAILED
|
||||
self.counts[status] += 1
|
||||
@@ -939,6 +978,31 @@ def __call__(self, *args, **kwargs):
|
||||
if failures:
|
||||
raise TestSuiteFailure(failures)
|
||||
|
||||
def test_status(self, spec: spack.spec.Spec, externals: bool) -> Optional[TestStatus]:
|
||||
"""Determine the overall test results status for the spec.
|
||||
|
||||
Args:
|
||||
spec: instance of the spec under test
|
||||
externals: ``True`` if externals are to be tested, else ``False``
|
||||
|
||||
Returns:
|
||||
the spec's test status if available or ``None``
|
||||
"""
|
||||
tests_status_file = self.tested_file_for_spec(spec)
|
||||
if not os.path.exists(tests_status_file):
|
||||
self.ensure_stage()
|
||||
if spec.external and not externals:
|
||||
status = TestStatus.SKIPPED
|
||||
elif not spec.installed:
|
||||
status = TestStatus.SKIPPED
|
||||
else:
|
||||
status = TestStatus.NO_TESTS
|
||||
return status
|
||||
|
||||
with open(tests_status_file, "r") as f:
|
||||
value = (f.read()).strip("\n")
|
||||
return TestStatus(int(value)) if value else TestStatus.NO_TESTS
|
||||
|
||||
def ensure_stage(self):
|
||||
"""Ensure the test suite stage directory exists."""
|
||||
if not os.path.exists(self.stage):
|
||||
|
||||
@@ -231,7 +231,9 @@ def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs):
|
||||
dep.concretize()
|
||||
# mark compiler as depended-on by the packages that use it
|
||||
for pkg in pkgs:
|
||||
dep._dependents.add(spack.spec.DependencySpec(pkg.spec, dep, deptypes=("build",)))
|
||||
dep._dependents.add(
|
||||
spack.spec.DependencySpec(pkg.spec, dep, deptypes=("build",), virtuals=())
|
||||
)
|
||||
packages = [(s.package, False) for s in dep.traverse(order="post", root=False)]
|
||||
|
||||
packages.append((dep.package, True))
|
||||
|
||||
@@ -40,7 +40,7 @@
|
||||
|
||||
import llnl.util.filesystem
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.lang import dedupe
|
||||
from llnl.util.lang import dedupe, memoized
|
||||
|
||||
import spack.build_environment
|
||||
import spack.config
|
||||
@@ -170,17 +170,10 @@ def merge_config_rules(configuration, spec):
|
||||
Returns:
|
||||
dict: actions to be taken on the spec passed as an argument
|
||||
"""
|
||||
|
||||
# Get the top-level configuration for the module type we are using
|
||||
module_specific_configuration = copy.deepcopy(configuration)
|
||||
|
||||
# Construct a dictionary with the actions we need to perform on the spec
|
||||
# passed as a parameter
|
||||
|
||||
# The keyword 'all' is always evaluated first, all the others are
|
||||
# evaluated in order of appearance in the module file
|
||||
spec_configuration = module_specific_configuration.pop("all", {})
|
||||
for constraint, action in module_specific_configuration.items():
|
||||
spec_configuration = copy.deepcopy(configuration.get("all", {}))
|
||||
for constraint, action in configuration.items():
|
||||
if spec.satisfies(constraint):
|
||||
if hasattr(constraint, "override") and constraint.override:
|
||||
spec_configuration = {}
|
||||
@@ -200,14 +193,14 @@ def merge_config_rules(configuration, spec):
|
||||
# configuration
|
||||
|
||||
# Hash length in module files
|
||||
hash_length = module_specific_configuration.get("hash_length", 7)
|
||||
hash_length = configuration.get("hash_length", 7)
|
||||
spec_configuration["hash_length"] = hash_length
|
||||
|
||||
verbose = module_specific_configuration.get("verbose", False)
|
||||
verbose = configuration.get("verbose", False)
|
||||
spec_configuration["verbose"] = verbose
|
||||
|
||||
# module defaults per-package
|
||||
defaults = module_specific_configuration.get("defaults", [])
|
||||
defaults = configuration.get("defaults", [])
|
||||
spec_configuration["defaults"] = defaults
|
||||
|
||||
return spec_configuration
|
||||
@@ -400,7 +393,7 @@ class BaseConfiguration(object):
|
||||
querying easier. It needs to be sub-classed for specific module types.
|
||||
"""
|
||||
|
||||
default_projections = {"all": "{name}-{version}-{compiler.name}-{compiler.version}"}
|
||||
default_projections = {"all": "{name}/{version}-{compiler.name}-{compiler.version}"}
|
||||
|
||||
def __init__(self, spec, module_set_name, explicit=None):
|
||||
# Module where type(self) is defined
|
||||
@@ -679,6 +672,7 @@ def configure_options(self):
|
||||
return None
|
||||
|
||||
@tengine.context_property
|
||||
@memoized
|
||||
def environment_modifications(self):
|
||||
"""List of environment modifications to be processed."""
|
||||
# Modifications guessed by inspecting the spec prefix
|
||||
@@ -749,6 +743,19 @@ def environment_modifications(self):
|
||||
|
||||
return [(type(x).__name__, x) for x in env if x.name not in exclude]
|
||||
|
||||
@tengine.context_property
|
||||
def has_manpath_modifications(self):
|
||||
"""True if MANPATH environment variable is modified."""
|
||||
for modification_type, cmd in self.environment_modifications:
|
||||
if not isinstance(
|
||||
cmd, (spack.util.environment.PrependPath, spack.util.environment.AppendPath)
|
||||
):
|
||||
continue
|
||||
if cmd.name == "MANPATH":
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
@tengine.context_property
|
||||
def autoload(self):
|
||||
"""List of modules that needs to be loaded automatically."""
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
import itertools
|
||||
import os.path
|
||||
import posixpath
|
||||
from typing import Any, Dict
|
||||
from typing import Any, Dict, List
|
||||
|
||||
import llnl.util.lang as lang
|
||||
|
||||
@@ -56,7 +56,7 @@ def make_context(spec, module_set_name, explicit):
|
||||
return LmodContext(conf)
|
||||
|
||||
|
||||
def guess_core_compilers(name, store=False):
|
||||
def guess_core_compilers(name, store=False) -> List[spack.spec.CompilerSpec]:
|
||||
"""Guesses the list of core compilers installed in the system.
|
||||
|
||||
Args:
|
||||
@@ -64,21 +64,19 @@ def guess_core_compilers(name, store=False):
|
||||
modules.yaml configuration file
|
||||
|
||||
Returns:
|
||||
List of core compilers, if found, or None
|
||||
List of found core compilers
|
||||
"""
|
||||
core_compilers = []
|
||||
for compiler_config in spack.compilers.all_compilers_config():
|
||||
for compiler in spack.compilers.all_compilers():
|
||||
try:
|
||||
compiler = compiler_config["compiler"]
|
||||
# A compiler is considered to be a core compiler if any of the
|
||||
# C, C++ or Fortran compilers reside in a system directory
|
||||
is_system_compiler = any(
|
||||
os.path.dirname(x) in spack.util.environment.SYSTEM_DIRS
|
||||
for x in compiler["paths"].values()
|
||||
if x is not None
|
||||
os.path.dirname(getattr(compiler, x, "")) in spack.util.environment.SYSTEM_DIRS
|
||||
for x in ("cc", "cxx", "f77", "fc")
|
||||
)
|
||||
if is_system_compiler:
|
||||
core_compilers.append(str(compiler["spec"]))
|
||||
core_compilers.append(compiler.spec)
|
||||
except (KeyError, TypeError, AttributeError):
|
||||
continue
|
||||
|
||||
@@ -89,10 +87,10 @@ def guess_core_compilers(name, store=False):
|
||||
modules_cfg = spack.config.get(
|
||||
"modules:" + name, {}, scope=spack.config.default_modify_scope()
|
||||
)
|
||||
modules_cfg.setdefault("lmod", {})["core_compilers"] = core_compilers
|
||||
modules_cfg.setdefault("lmod", {})["core_compilers"] = [str(x) for x in core_compilers]
|
||||
spack.config.set("modules:" + name, modules_cfg, scope=spack.config.default_modify_scope())
|
||||
|
||||
return core_compilers or None
|
||||
return core_compilers
|
||||
|
||||
|
||||
class LmodConfiguration(BaseConfiguration):
|
||||
@@ -104,7 +102,7 @@ class LmodConfiguration(BaseConfiguration):
|
||||
default_projections = {"all": posixpath.join("{name}", "{version}")}
|
||||
|
||||
@property
|
||||
def core_compilers(self):
|
||||
def core_compilers(self) -> List[spack.spec.CompilerSpec]:
|
||||
"""Returns the list of "Core" compilers
|
||||
|
||||
Raises:
|
||||
@@ -112,14 +110,18 @@ def core_compilers(self):
|
||||
specified in the configuration file or the sequence
|
||||
is empty
|
||||
"""
|
||||
value = configuration(self.name).get("core_compilers") or guess_core_compilers(
|
||||
self.name, store=True
|
||||
)
|
||||
compilers = [
|
||||
spack.spec.CompilerSpec(c) for c in configuration(self.name).get("core_compilers", [])
|
||||
]
|
||||
|
||||
if not value:
|
||||
if not compilers:
|
||||
compilers = guess_core_compilers(self.name, store=True)
|
||||
|
||||
if not compilers:
|
||||
msg = 'the key "core_compilers" must be set in modules.yaml'
|
||||
raise CoreCompilersNotFoundError(msg)
|
||||
return value
|
||||
|
||||
return compilers
|
||||
|
||||
@property
|
||||
def core_specs(self):
|
||||
@@ -132,6 +134,7 @@ def filter_hierarchy_specs(self):
|
||||
return configuration(self.name).get("filter_hierarchy_specs", {})
|
||||
|
||||
@property
|
||||
@lang.memoized
|
||||
def hierarchy_tokens(self):
|
||||
"""Returns the list of tokens that are part of the modulefile
|
||||
hierarchy. 'compiler' is always present.
|
||||
@@ -156,6 +159,7 @@ def hierarchy_tokens(self):
|
||||
return tokens
|
||||
|
||||
@property
|
||||
@lang.memoized
|
||||
def requires(self):
|
||||
"""Returns a dictionary mapping all the requirements of this spec
|
||||
to the actual provider. 'compiler' is always present among the
|
||||
@@ -222,6 +226,7 @@ def available(self):
|
||||
return available
|
||||
|
||||
@property
|
||||
@lang.memoized
|
||||
def missing(self):
|
||||
"""Returns the list of tokens that are not available."""
|
||||
return [x for x in self.hierarchy_tokens if x not in self.available]
|
||||
@@ -283,16 +288,18 @@ def token_to_path(self, name, value):
|
||||
|
||||
# If we are dealing with a core compiler, return 'Core'
|
||||
core_compilers = self.conf.core_compilers
|
||||
if name == "compiler" and str(value) in core_compilers:
|
||||
if name == "compiler" and any(
|
||||
spack.spec.CompilerSpec(value).satisfies(c) for c in core_compilers
|
||||
):
|
||||
return "Core"
|
||||
|
||||
# CompilerSpec does not have an hash, as we are not allowed to
|
||||
# CompilerSpec does not have a hash, as we are not allowed to
|
||||
# use different flavors of the same compiler
|
||||
if name == "compiler":
|
||||
return path_part_fmt.format(token=value)
|
||||
|
||||
# In case the hierarchy token refers to a virtual provider
|
||||
# we need to append an hash to the version to distinguish
|
||||
# we need to append a hash to the version to distinguish
|
||||
# among flavors of the same library (e.g. openblas~openmp vs.
|
||||
# openblas+openmp)
|
||||
path = path_part_fmt.format(token=value)
|
||||
@@ -313,6 +320,7 @@ def available_path_parts(self):
|
||||
return parts
|
||||
|
||||
@property
|
||||
@lang.memoized
|
||||
def unlocked_paths(self):
|
||||
"""Returns a dictionary mapping conditions to a list of unlocked
|
||||
paths.
|
||||
@@ -424,6 +432,7 @@ def missing(self):
|
||||
return self.conf.missing
|
||||
|
||||
@tengine.context_property
|
||||
@lang.memoized
|
||||
def unlocked_paths(self):
|
||||
"""Returns the list of paths that are unlocked unconditionally."""
|
||||
layout = make_layout(self.spec, self.conf.name, self.conf.explicit)
|
||||
|
||||
@@ -108,6 +108,5 @@
|
||||
# These are just here for editor support; they will be replaced when the build env
|
||||
# is set up.
|
||||
make = MakeExecutable("make", jobs=1)
|
||||
gmake = MakeExecutable("gmake", jobs=1)
|
||||
ninja = MakeExecutable("ninja", jobs=1)
|
||||
configure = Executable(join_path(".", "configure"))
|
||||
|
||||
@@ -1231,6 +1231,7 @@ def dependencies_of_type(cls, *deptypes):
|
||||
if any(dt in cls.dependencies[name][cond].type for cond in conds for dt in deptypes)
|
||||
)
|
||||
|
||||
# TODO: allow more than one active extendee.
|
||||
@property
|
||||
def extendee_spec(self):
|
||||
"""
|
||||
@@ -1246,7 +1247,6 @@ def extendee_spec(self):
|
||||
if dep.name in self.extendees:
|
||||
deps.append(dep)
|
||||
|
||||
# TODO: allow more than one active extendee.
|
||||
if deps:
|
||||
assert len(deps) == 1
|
||||
return deps[0]
|
||||
@@ -1256,7 +1256,6 @@ def extendee_spec(self):
|
||||
if self.spec._concrete:
|
||||
return None
|
||||
else:
|
||||
# TODO: do something sane here with more than one extendee
|
||||
# If it's not concrete, then return the spec from the
|
||||
# extends() directive since that is all we know so far.
|
||||
spec_str, kwargs = next(iter(self.extendees.items()))
|
||||
|
||||
@@ -291,7 +291,7 @@ def next_spec(
|
||||
if root_spec.concrete:
|
||||
raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))
|
||||
|
||||
root_spec._add_dependency(dependency, deptypes=())
|
||||
root_spec._add_dependency(dependency, deptypes=(), virtuals=())
|
||||
|
||||
else:
|
||||
break
|
||||
|
||||
@@ -37,7 +37,9 @@
|
||||
|
||||
|
||||
def slingshot_network():
|
||||
return os.path.exists("/opt/cray/pe") and os.path.exists("/lib64/libcxi.so")
|
||||
return os.path.exists("/opt/cray/pe") and (
|
||||
os.path.exists("/lib64/libcxi.so") or os.path.exists("/usr/lib64/libcxi.so")
|
||||
)
|
||||
|
||||
|
||||
def _target_name_from_craype_target_name(name):
|
||||
|
||||
@@ -292,8 +292,8 @@ def from_json(stream, repository):
|
||||
index.providers = _transform(
|
||||
providers,
|
||||
lambda vpkg, plist: (
|
||||
spack.spec.SpecfileV3.from_node_dict(vpkg),
|
||||
set(spack.spec.SpecfileV3.from_node_dict(p) for p in plist),
|
||||
spack.spec.SpecfileV4.from_node_dict(vpkg),
|
||||
set(spack.spec.SpecfileV4.from_node_dict(p) for p in plist),
|
||||
),
|
||||
)
|
||||
return index
|
||||
|
||||
@@ -6,7 +6,6 @@
|
||||
import itertools
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
from collections import OrderedDict
|
||||
|
||||
import macholib.mach_o
|
||||
@@ -356,13 +355,7 @@ def _set_elf_rpaths(target, rpaths):
|
||||
# Join the paths using ':' as a separator
|
||||
rpaths_str = ":".join(rpaths)
|
||||
|
||||
# If we're relocating patchelf itself, make a copy and use it
|
||||
bak_path = None
|
||||
if target.endswith("/bin/patchelf"):
|
||||
bak_path = target + ".bak"
|
||||
shutil.copy(target, bak_path)
|
||||
|
||||
patchelf, output = executable.Executable(bak_path or _patchelf()), None
|
||||
patchelf, output = executable.Executable(_patchelf()), None
|
||||
try:
|
||||
# TODO: revisit the use of --force-rpath as it might be conditional
|
||||
# TODO: if we want to support setting RUNPATH from binary packages
|
||||
@@ -371,9 +364,6 @@ def _set_elf_rpaths(target, rpaths):
|
||||
except executable.ProcessError as e:
|
||||
msg = "patchelf --force-rpath --set-rpath {0} failed with error {1}"
|
||||
tty.warn(msg.format(target, e))
|
||||
finally:
|
||||
if bak_path and os.path.exists(bak_path):
|
||||
os.remove(bak_path)
|
||||
return output
|
||||
|
||||
|
||||
|
||||
@@ -1239,7 +1239,7 @@ def get_pkg_class(self, pkg_name):
|
||||
try:
|
||||
module = importlib.import_module(fullname)
|
||||
except ImportError:
|
||||
raise UnknownPackageError(pkg_name)
|
||||
raise UnknownPackageError(fullname)
|
||||
except Exception as e:
|
||||
msg = f"cannot load package '{pkg_name}' from the '{self.namespace}' repository: {e}"
|
||||
raise RepoError(msg) from e
|
||||
|
||||
@@ -209,7 +209,7 @@ def update(data):
|
||||
# Warn if deprecated section is still in the environment
|
||||
ci_env = ev.active_environment()
|
||||
if ci_env:
|
||||
env_config = ev.config_dict(ci_env.manifest)
|
||||
env_config = ci_env.manifest[ev.TOP_LEVEL_KEY]
|
||||
if "gitlab-ci" in env_config:
|
||||
tty.die("Error: `gitlab-ci` section detected with `ci`, these are not compatible")
|
||||
|
||||
|
||||
@@ -15,8 +15,8 @@
|
||||
import spack.schema.packages
|
||||
import spack.schema.projections
|
||||
|
||||
#: legal first keys in the schema
|
||||
keys = ("spack", "env")
|
||||
#: Top level key in a manifest file
|
||||
TOP_LEVEL_KEY = "spack"
|
||||
|
||||
spec_list_schema = {
|
||||
"type": "array",
|
||||
@@ -47,8 +47,8 @@
|
||||
"title": "Spack environment file schema",
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"patternProperties": {
|
||||
"^env|spack$": {
|
||||
"properties": {
|
||||
"spack": {
|
||||
"type": "object",
|
||||
"default": {},
|
||||
"additionalProperties": False,
|
||||
|
||||
@@ -861,9 +861,9 @@ class SpackSolverSetup(object):
|
||||
def __init__(self, tests=False):
|
||||
self.gen = None # set by setup()
|
||||
|
||||
self.declared_versions = {}
|
||||
self.possible_versions = {}
|
||||
self.deprecated_versions = {}
|
||||
self.declared_versions = collections.defaultdict(list)
|
||||
self.possible_versions = collections.defaultdict(set)
|
||||
self.deprecated_versions = collections.defaultdict(set)
|
||||
|
||||
self.possible_virtuals = None
|
||||
self.possible_compilers = []
|
||||
@@ -1722,10 +1722,6 @@ class Body(object):
|
||||
|
||||
def build_version_dict(self, possible_pkgs):
|
||||
"""Declare any versions in specs not declared in packages."""
|
||||
self.declared_versions = collections.defaultdict(list)
|
||||
self.possible_versions = collections.defaultdict(set)
|
||||
self.deprecated_versions = collections.defaultdict(set)
|
||||
|
||||
packages_yaml = spack.config.get("packages")
|
||||
packages_yaml = _normalize_packages_yaml(packages_yaml)
|
||||
for pkg_name in possible_pkgs:
|
||||
@@ -1766,12 +1762,7 @@ def key_fn(item):
|
||||
if isinstance(v, vn.GitVersion):
|
||||
version_defs.append(v)
|
||||
else:
|
||||
satisfying_versions = list(x for x in pkg_class.versions if x.satisfies(v))
|
||||
if not satisfying_versions:
|
||||
raise spack.config.ConfigError(
|
||||
"Preference for version {0} does not match any version "
|
||||
" defined in {1}".format(str(v), pkg_name)
|
||||
)
|
||||
satisfying_versions = self._check_for_defined_matching_versions(pkg_class, v)
|
||||
# Amongst all defined versions satisfying this specific
|
||||
# preference, the highest-numbered version is the
|
||||
# most-preferred: therefore sort satisfying versions
|
||||
@@ -1784,6 +1775,28 @@ def key_fn(item):
|
||||
)
|
||||
self.possible_versions[pkg_name].add(vdef)
|
||||
|
||||
def _check_for_defined_matching_versions(self, pkg_class, v):
|
||||
"""Given a version specification (which may be a concrete version,
|
||||
range, etc.), determine if any package.py version declarations
|
||||
or externals define a version which satisfies it.
|
||||
|
||||
This is primarily for determining whether a version request (e.g.
|
||||
version preferences, which should not themselves define versions)
|
||||
refers to a defined version.
|
||||
|
||||
This function raises an exception if no satisfying versions are
|
||||
found.
|
||||
"""
|
||||
pkg_name = pkg_class.name
|
||||
satisfying_versions = list(x for x in pkg_class.versions if x.satisfies(v))
|
||||
satisfying_versions.extend(x for x in self.possible_versions[pkg_name] if x.satisfies(v))
|
||||
if not satisfying_versions:
|
||||
raise spack.config.ConfigError(
|
||||
"Preference for version {0} does not match any version"
|
||||
" defined for {1} (in its package.py or any external)".format(str(v), pkg_name)
|
||||
)
|
||||
return satisfying_versions
|
||||
|
||||
def add_concrete_versions_from_specs(self, specs, origin):
|
||||
"""Add concrete versions to possible versions from lists of CLI/dev specs."""
|
||||
for s in spack.traverse.traverse_nodes(specs):
|
||||
@@ -2215,14 +2228,6 @@ def setup(self, driver, specs, reuse=None):
|
||||
# get possible compilers
|
||||
self.possible_compilers = self.generate_possible_compilers(specs)
|
||||
|
||||
# traverse all specs and packages to build dict of possible versions
|
||||
self.build_version_dict(possible)
|
||||
self.add_concrete_versions_from_specs(specs, Provenance.SPEC)
|
||||
self.add_concrete_versions_from_specs(dev_specs, Provenance.DEV_SPEC)
|
||||
|
||||
req_version_specs = _get_versioned_specs_from_pkg_requirements()
|
||||
self.add_concrete_versions_from_specs(req_version_specs, Provenance.PACKAGE_REQUIREMENT)
|
||||
|
||||
self.gen.h1("Concrete input spec definitions")
|
||||
self.define_concrete_input_specs(specs, possible)
|
||||
|
||||
@@ -2250,6 +2255,14 @@ def setup(self, driver, specs, reuse=None):
|
||||
self.provider_requirements()
|
||||
self.external_packages()
|
||||
|
||||
# traverse all specs and packages to build dict of possible versions
|
||||
self.build_version_dict(possible)
|
||||
self.add_concrete_versions_from_specs(specs, Provenance.SPEC)
|
||||
self.add_concrete_versions_from_specs(dev_specs, Provenance.DEV_SPEC)
|
||||
|
||||
req_version_specs = self._get_versioned_specs_from_pkg_requirements()
|
||||
self.add_concrete_versions_from_specs(req_version_specs, Provenance.PACKAGE_REQUIREMENT)
|
||||
|
||||
self.gen.h1("Package Constraints")
|
||||
for pkg in sorted(self.pkgs):
|
||||
self.gen.h2("Package rules: %s" % pkg)
|
||||
@@ -2296,83 +2309,78 @@ def literal_specs(self, specs):
|
||||
if self.concretize_everything:
|
||||
self.gen.fact(fn.concretize_everything())
|
||||
|
||||
def _get_versioned_specs_from_pkg_requirements(self):
|
||||
"""If package requirements mention versions that are not mentioned
|
||||
elsewhere, then we need to collect those to mark them as possible
|
||||
versions.
|
||||
"""
|
||||
req_version_specs = list()
|
||||
config = spack.config.get("packages")
|
||||
for pkg_name, d in config.items():
|
||||
if pkg_name == "all":
|
||||
continue
|
||||
if "require" in d:
|
||||
req_version_specs.extend(self._specs_from_requires(pkg_name, d["require"]))
|
||||
return req_version_specs
|
||||
|
||||
def _get_versioned_specs_from_pkg_requirements():
|
||||
"""If package requirements mention versions that are not mentioned
|
||||
elsewhere, then we need to collect those to mark them as possible
|
||||
versions.
|
||||
"""
|
||||
req_version_specs = list()
|
||||
config = spack.config.get("packages")
|
||||
for pkg_name, d in config.items():
|
||||
if pkg_name == "all":
|
||||
continue
|
||||
if "require" in d:
|
||||
req_version_specs.extend(_specs_from_requires(pkg_name, d["require"]))
|
||||
return req_version_specs
|
||||
|
||||
|
||||
def _specs_from_requires(pkg_name, section):
|
||||
"""Collect specs from requirements which define versions (i.e. those that
|
||||
have a concrete version). Requirements can define *new* versions if
|
||||
they are included as part of an equivalence (hash=number) but not
|
||||
otherwise.
|
||||
"""
|
||||
if isinstance(section, str):
|
||||
spec = spack.spec.Spec(section)
|
||||
if not spec.name:
|
||||
spec.name = pkg_name
|
||||
extracted_specs = [spec]
|
||||
else:
|
||||
spec_strs = []
|
||||
for spec_group in section:
|
||||
if isinstance(spec_group, str):
|
||||
spec_strs.append(spec_group)
|
||||
else:
|
||||
# Otherwise it is an object. The object can contain a single
|
||||
# "spec" constraint, or a list of them with "any_of" or
|
||||
# "one_of" policy.
|
||||
if "spec" in spec_group:
|
||||
new_constraints = [spec_group["spec"]]
|
||||
else:
|
||||
key = "one_of" if "one_of" in spec_group else "any_of"
|
||||
new_constraints = spec_group[key]
|
||||
spec_strs.extend(new_constraints)
|
||||
|
||||
extracted_specs = []
|
||||
for spec_str in spec_strs:
|
||||
spec = spack.spec.Spec(spec_str)
|
||||
def _specs_from_requires(self, pkg_name, section):
|
||||
"""Collect specs from requirements which define versions (i.e. those that
|
||||
have a concrete version). Requirements can define *new* versions if
|
||||
they are included as part of an equivalence (hash=number) but not
|
||||
otherwise.
|
||||
"""
|
||||
if isinstance(section, str):
|
||||
spec = spack.spec.Spec(section)
|
||||
if not spec.name:
|
||||
spec.name = pkg_name
|
||||
extracted_specs.append(spec)
|
||||
extracted_specs = [spec]
|
||||
else:
|
||||
spec_strs = []
|
||||
for spec_group in section:
|
||||
if isinstance(spec_group, str):
|
||||
spec_strs.append(spec_group)
|
||||
else:
|
||||
# Otherwise it is an object. The object can contain a single
|
||||
# "spec" constraint, or a list of them with "any_of" or
|
||||
# "one_of" policy.
|
||||
if "spec" in spec_group:
|
||||
new_constraints = [spec_group["spec"]]
|
||||
else:
|
||||
key = "one_of" if "one_of" in spec_group else "any_of"
|
||||
new_constraints = spec_group[key]
|
||||
spec_strs.extend(new_constraints)
|
||||
|
||||
version_specs = []
|
||||
for spec in extracted_specs:
|
||||
if spec.versions.concrete:
|
||||
# Note: this includes git versions
|
||||
version_specs.append(spec)
|
||||
continue
|
||||
extracted_specs = []
|
||||
for spec_str in spec_strs:
|
||||
spec = spack.spec.Spec(spec_str)
|
||||
if not spec.name:
|
||||
spec.name = pkg_name
|
||||
extracted_specs.append(spec)
|
||||
|
||||
# Prefer spec's name if it exists, in case the spec is
|
||||
# requiring a specific implementation inside of a virtual section
|
||||
# e.g. packages:mpi:require:openmpi@4.0.1
|
||||
pkg_class = spack.repo.path.get_pkg_class(spec.name or pkg_name)
|
||||
satisfying_versions = list(v for v in pkg_class.versions if v.satisfies(spec.versions))
|
||||
if not satisfying_versions:
|
||||
raise spack.config.ConfigError(
|
||||
"{0} assigns a version that is not defined in"
|
||||
" the associated package.py".format(str(spec))
|
||||
version_specs = []
|
||||
for spec in extracted_specs:
|
||||
if spec.versions.concrete:
|
||||
# Note: this includes git versions
|
||||
version_specs.append(spec)
|
||||
continue
|
||||
|
||||
# Prefer spec's name if it exists, in case the spec is
|
||||
# requiring a specific implementation inside of a virtual section
|
||||
# e.g. packages:mpi:require:openmpi@4.0.1
|
||||
pkg_class = spack.repo.path.get_pkg_class(spec.name or pkg_name)
|
||||
satisfying_versions = self._check_for_defined_matching_versions(
|
||||
pkg_class, spec.versions
|
||||
)
|
||||
|
||||
# Version ranges ("@1.3" without the "=", "@1.2:1.4") and lists
|
||||
# will end up here
|
||||
ordered_satisfying_versions = sorted(satisfying_versions, reverse=True)
|
||||
vspecs = list(spack.spec.Spec("@{0}".format(x)) for x in ordered_satisfying_versions)
|
||||
version_specs.extend(vspecs)
|
||||
# Version ranges ("@1.3" without the "=", "@1.2:1.4") and lists
|
||||
# will end up here
|
||||
ordered_satisfying_versions = sorted(satisfying_versions, reverse=True)
|
||||
vspecs = list(spack.spec.Spec("@{0}".format(x)) for x in ordered_satisfying_versions)
|
||||
version_specs.extend(vspecs)
|
||||
|
||||
for spec in version_specs:
|
||||
spec.attach_git_version_lookup()
|
||||
return version_specs
|
||||
for spec in version_specs:
|
||||
spec.attach_git_version_lookup()
|
||||
return version_specs
|
||||
|
||||
|
||||
class SpecBuilder(object):
|
||||
@@ -2492,10 +2500,15 @@ def depends_on(self, pkg, dep, type):
|
||||
assert len(dependencies) < 2, msg
|
||||
|
||||
if not dependencies:
|
||||
self._specs[pkg].add_dependency_edge(self._specs[dep], deptypes=(type,))
|
||||
self._specs[pkg].add_dependency_edge(self._specs[dep], deptypes=(type,), virtuals=())
|
||||
else:
|
||||
# TODO: This assumes that each solve unifies dependencies
|
||||
dependencies[0].add_type(type)
|
||||
dependencies[0].update_deptypes(deptypes=(type,))
|
||||
|
||||
def virtual_on_edge(self, pkg, provider, virtual):
|
||||
dependencies = self._specs[pkg].edges_to_dependencies(name=provider)
|
||||
assert len(dependencies) == 1
|
||||
dependencies[0].update_virtuals((virtual,))
|
||||
|
||||
def reorder_flags(self):
|
||||
"""Order compiler flags on specs in predefined order.
|
||||
@@ -2573,6 +2586,8 @@ def sort_fn(function_tuple):
|
||||
return (-2, 0)
|
||||
elif name == "external_spec_selected":
|
||||
return (0, 0) # note out of order so this goes last
|
||||
elif name == "virtual_on_edge":
|
||||
return (1, 0)
|
||||
else:
|
||||
return (-1, 0)
|
||||
|
||||
@@ -2828,12 +2843,13 @@ class InternalConcretizerError(spack.error.UnsatisfiableSpecError):
|
||||
"""
|
||||
|
||||
def __init__(self, provided, conflicts):
|
||||
indented = [" %s\n" % conflict for conflict in conflicts]
|
||||
error_msg = "".join(indented)
|
||||
msg = "Spack concretizer internal error. Please submit a bug report"
|
||||
msg += "\n Please include the command, environment if applicable,"
|
||||
msg += "\n and the following error message."
|
||||
msg = "\n %s is unsatisfiable, errors are:\n%s" % (provided, error_msg)
|
||||
msg = (
|
||||
"Spack concretizer internal error. Please submit a bug report and include the "
|
||||
"command, environment if applicable and the following error message."
|
||||
f"\n {provided} is unsatisfiable, errors are:"
|
||||
)
|
||||
|
||||
msg += "".join([f"\n {conflict}" for conflict in conflicts])
|
||||
|
||||
super(spack.error.UnsatisfiableSpecError, self).__init__(msg)
|
||||
|
||||
|
||||
@@ -300,6 +300,11 @@ attr("depends_on", Package, Provider, Type)
|
||||
provider(Provider, Virtual),
|
||||
not external(Package).
|
||||
|
||||
attr("virtual_on_edge", Package, Provider, Virtual)
|
||||
:- dependency_holds(Package, Virtual, Type),
|
||||
provider(Provider, Virtual),
|
||||
not external(Package).
|
||||
|
||||
% dependencies on virtuals also imply that the virtual is a virtual node
|
||||
attr("virtual_node", Virtual)
|
||||
:- dependency_holds(Package, Virtual, Type),
|
||||
|
||||
@@ -170,7 +170,7 @@
|
||||
)
|
||||
|
||||
#: specfile format version. Must increase monotonically
|
||||
SPECFILE_FORMAT_VERSION = 3
|
||||
SPECFILE_FORMAT_VERSION = 4
|
||||
|
||||
|
||||
def colorize_spec(spec):
|
||||
@@ -679,6 +679,16 @@ def from_dict(d):
|
||||
d = d["compiler"]
|
||||
return CompilerSpec(d["name"], vn.VersionList.from_dict(d))
|
||||
|
||||
@property
|
||||
def display_str(self):
|
||||
"""Equivalent to {compiler.name}{@compiler.version} for Specs, without extra
|
||||
@= for readability."""
|
||||
if self.concrete:
|
||||
return f"{self.name}@{self.version}"
|
||||
elif self.versions != vn.any_version:
|
||||
return f"{self.name}@{self.versions}"
|
||||
return self.name
|
||||
|
||||
def __str__(self):
|
||||
out = self.name
|
||||
if self.versions and self.versions != vn.any_version:
|
||||
@@ -704,47 +714,81 @@ class DependencySpec:
|
||||
parent: starting node of the edge
|
||||
spec: ending node of the edge.
|
||||
deptypes: list of strings, representing dependency relationships.
|
||||
virtuals: virtual packages provided from child to parent node.
|
||||
"""
|
||||
|
||||
__slots__ = "parent", "spec", "deptypes"
|
||||
__slots__ = "parent", "spec", "parameters"
|
||||
|
||||
def __init__(self, parent: "Spec", spec: "Spec", *, deptypes: dp.DependencyArgument):
|
||||
def __init__(
|
||||
self,
|
||||
parent: "Spec",
|
||||
spec: "Spec",
|
||||
*,
|
||||
deptypes: dp.DependencyArgument,
|
||||
virtuals: Tuple[str, ...],
|
||||
):
|
||||
self.parent = parent
|
||||
self.spec = spec
|
||||
self.deptypes = dp.canonical_deptype(deptypes)
|
||||
self.parameters = {
|
||||
"deptypes": dp.canonical_deptype(deptypes),
|
||||
"virtuals": tuple(sorted(set(virtuals))),
|
||||
}
|
||||
|
||||
def update_deptypes(self, deptypes: dp.DependencyArgument) -> bool:
|
||||
deptypes = set(deptypes)
|
||||
deptypes.update(self.deptypes)
|
||||
deptypes = tuple(sorted(deptypes))
|
||||
changed = self.deptypes != deptypes
|
||||
@property
|
||||
def deptypes(self) -> Tuple[str, ...]:
|
||||
return self.parameters["deptypes"]
|
||||
|
||||
self.deptypes = deptypes
|
||||
return changed
|
||||
@property
|
||||
def virtuals(self) -> Tuple[str, ...]:
|
||||
return self.parameters["virtuals"]
|
||||
|
||||
def _update_edge_multivalued_property(
|
||||
self, property_name: str, value: Tuple[str, ...]
|
||||
) -> bool:
|
||||
current = self.parameters[property_name]
|
||||
update = set(current) | set(value)
|
||||
update = tuple(sorted(update))
|
||||
changed = current != update
|
||||
|
||||
if not changed:
|
||||
return False
|
||||
|
||||
self.parameters[property_name] = update
|
||||
return True
|
||||
|
||||
def update_deptypes(self, deptypes: Tuple[str, ...]) -> bool:
|
||||
"""Update the current dependency types"""
|
||||
return self._update_edge_multivalued_property("deptypes", deptypes)
|
||||
|
||||
def update_virtuals(self, virtuals: Tuple[str, ...]) -> bool:
|
||||
"""Update the list of provided virtuals"""
|
||||
return self._update_edge_multivalued_property("virtuals", virtuals)
|
||||
|
||||
def copy(self) -> "DependencySpec":
|
||||
return DependencySpec(self.parent, self.spec, deptypes=self.deptypes)
|
||||
|
||||
def add_type(self, type: dp.DependencyArgument):
|
||||
self.deptypes = dp.canonical_deptype(self.deptypes + dp.canonical_deptype(type))
|
||||
"""Return a copy of this edge"""
|
||||
return DependencySpec(
|
||||
self.parent, self.spec, deptypes=self.deptypes, virtuals=self.virtuals
|
||||
)
|
||||
|
||||
def _cmp_iter(self):
|
||||
yield self.parent.name if self.parent else None
|
||||
yield self.spec.name if self.spec else None
|
||||
yield self.deptypes
|
||||
yield self.virtuals
|
||||
|
||||
def __str__(self) -> str:
|
||||
return "%s %s--> %s" % (
|
||||
self.parent.name if self.parent else None,
|
||||
self.deptypes,
|
||||
self.spec.name if self.spec else None,
|
||||
)
|
||||
parent = self.parent.name if self.parent else None
|
||||
child = self.spec.name if self.spec else None
|
||||
return f"{parent} {self.deptypes}[virtuals={','.join(self.virtuals)}] --> {child}"
|
||||
|
||||
def canonical(self) -> Tuple[str, str, Tuple[str, ...]]:
|
||||
return self.parent.dag_hash(), self.spec.dag_hash(), self.deptypes
|
||||
def canonical(self) -> Tuple[str, str, Tuple[str, ...], Tuple[str, ...]]:
|
||||
return self.parent.dag_hash(), self.spec.dag_hash(), self.deptypes, self.virtuals
|
||||
|
||||
def flip(self) -> "DependencySpec":
|
||||
return DependencySpec(parent=self.spec, spec=self.parent, deptypes=self.deptypes)
|
||||
"""Flip the dependency, and drop virtual information"""
|
||||
return DependencySpec(
|
||||
parent=self.spec, spec=self.parent, deptypes=self.deptypes, virtuals=()
|
||||
)
|
||||
|
||||
|
||||
class CompilerFlag(str):
|
||||
@@ -1565,10 +1609,12 @@ def _set_compiler(self, compiler):
|
||||
)
|
||||
self.compiler = compiler
|
||||
|
||||
def _add_dependency(self, spec: "Spec", *, deptypes: dp.DependencyArgument):
|
||||
def _add_dependency(
|
||||
self, spec: "Spec", *, deptypes: dp.DependencyArgument, virtuals: Tuple[str, ...]
|
||||
):
|
||||
"""Called by the parser to add another spec as a dependency."""
|
||||
if spec.name not in self._dependencies or not spec.name:
|
||||
self.add_dependency_edge(spec, deptypes=deptypes)
|
||||
self.add_dependency_edge(spec, deptypes=deptypes, virtuals=virtuals)
|
||||
return
|
||||
|
||||
# Keep the intersection of constraints when a dependency is added
|
||||
@@ -1586,34 +1632,58 @@ def _add_dependency(self, spec: "Spec", *, deptypes: dp.DependencyArgument):
|
||||
"Cannot depend on incompatible specs '%s' and '%s'" % (dspec.spec, spec)
|
||||
)
|
||||
|
||||
def add_dependency_edge(self, dependency_spec: "Spec", *, deptypes: dp.DependencyArgument):
|
||||
def add_dependency_edge(
|
||||
self,
|
||||
dependency_spec: "Spec",
|
||||
*,
|
||||
deptypes: dp.DependencyArgument,
|
||||
virtuals: Tuple[str, ...],
|
||||
):
|
||||
"""Add a dependency edge to this spec.
|
||||
|
||||
Args:
|
||||
dependency_spec: spec of the dependency
|
||||
deptypes: dependency types for this edge
|
||||
virtuals: virtuals provided by this edge
|
||||
"""
|
||||
deptypes = dp.canonical_deptype(deptypes)
|
||||
|
||||
# Check if we need to update edges that are already present
|
||||
selected = self._dependencies.select(child=dependency_spec.name)
|
||||
for edge in selected:
|
||||
has_errors, details = False, []
|
||||
msg = f"cannot update the edge from {edge.parent.name} to {edge.spec.name}"
|
||||
if any(d in edge.deptypes for d in deptypes):
|
||||
msg = (
|
||||
'cannot add a dependency on "{0.spec}" of {1} type '
|
||||
'when the "{0.parent}" has the edge {0!s} already'
|
||||
has_errors = True
|
||||
details.append(
|
||||
(
|
||||
f"{edge.parent.name} has already an edge matching any"
|
||||
f" of these types {str(deptypes)}"
|
||||
)
|
||||
)
|
||||
raise spack.error.SpecError(msg.format(edge, deptypes))
|
||||
|
||||
if any(v in edge.virtuals for v in virtuals):
|
||||
has_errors = True
|
||||
details.append(
|
||||
(
|
||||
f"{edge.parent.name} has already an edge matching any"
|
||||
f" of these virtuals {str(virtuals)}"
|
||||
)
|
||||
)
|
||||
|
||||
if has_errors:
|
||||
raise spack.error.SpecError(msg, "\n".join(details))
|
||||
|
||||
for edge in selected:
|
||||
if id(dependency_spec) == id(edge.spec):
|
||||
# If we are here, it means the edge object was previously added to
|
||||
# both the parent and the child. When we update this object they'll
|
||||
# both see the deptype modification.
|
||||
edge.add_type(deptypes)
|
||||
edge.update_deptypes(deptypes=deptypes)
|
||||
edge.update_virtuals(virtuals=virtuals)
|
||||
return
|
||||
|
||||
edge = DependencySpec(self, dependency_spec, deptypes=deptypes)
|
||||
edge = DependencySpec(self, dependency_spec, deptypes=deptypes, virtuals=virtuals)
|
||||
self._dependencies.add(edge)
|
||||
dependency_spec._dependents.add(edge)
|
||||
|
||||
@@ -1730,14 +1800,14 @@ def traverse_edges(self, **kwargs):
|
||||
def short_spec(self):
|
||||
"""Returns a version of the spec with the dependencies hashed
|
||||
instead of completely enumerated."""
|
||||
spec_format = "{name}{@version}{%compiler}"
|
||||
spec_format = "{name}{@version}{%compiler.name}{@compiler.version}"
|
||||
spec_format += "{variants}{arch=architecture}{/hash:7}"
|
||||
return self.format(spec_format)
|
||||
|
||||
@property
|
||||
def cshort_spec(self):
|
||||
"""Returns an auto-colorized version of ``self.short_spec``."""
|
||||
spec_format = "{name}{@version}{%compiler}"
|
||||
spec_format = "{name}{@version}{%compiler.name}{@compiler.version}"
|
||||
spec_format += "{variants}{arch=architecture}{/hash:7}"
|
||||
return self.cformat(spec_format)
|
||||
|
||||
@@ -1886,12 +1956,12 @@ def lookup_hash(self):
|
||||
for node in self.traverse(root=False):
|
||||
if node.abstract_hash:
|
||||
new = node._lookup_hash()
|
||||
spec._add_dependency(new, deptypes=())
|
||||
spec._add_dependency(new, deptypes=(), virtuals=())
|
||||
|
||||
# reattach nodes that were not otherwise satisfied by new dependencies
|
||||
for node in self.traverse(root=False):
|
||||
if not any(n._satisfies(node) for n in spec.traverse()):
|
||||
spec._add_dependency(node.copy(), deptypes=())
|
||||
spec._add_dependency(node.copy(), deptypes=(), virtuals=())
|
||||
|
||||
return spec
|
||||
|
||||
@@ -2026,8 +2096,14 @@ def to_node_dict(self, hash=ht.dag_hash):
|
||||
name_tuple = ("name", name)
|
||||
for dspec in edges_for_name:
|
||||
hash_tuple = (hash.name, dspec.spec._cached_hash(hash))
|
||||
type_tuple = ("type", sorted(str(s) for s in dspec.deptypes))
|
||||
deps_list.append(syaml.syaml_dict([name_tuple, hash_tuple, type_tuple]))
|
||||
parameters_tuple = (
|
||||
"parameters",
|
||||
syaml.syaml_dict(
|
||||
(key, dspec.parameters[key]) for key in sorted(dspec.parameters)
|
||||
),
|
||||
)
|
||||
ordered_entries = [name_tuple, hash_tuple, parameters_tuple]
|
||||
deps_list.append(syaml.syaml_dict(ordered_entries))
|
||||
d["dependencies"] = deps_list
|
||||
|
||||
# Name is included in case this is replacing a virtual.
|
||||
@@ -2351,7 +2427,7 @@ def spec_and_dependency_types(s):
|
||||
dag_node, dependency_types = spec_and_dependency_types(s)
|
||||
|
||||
dependency_spec = spec_builder({dag_node: s_dependencies})
|
||||
spec._add_dependency(dependency_spec, deptypes=dependency_types)
|
||||
spec._add_dependency(dependency_spec, deptypes=dependency_types, virtuals=())
|
||||
|
||||
return spec
|
||||
|
||||
@@ -2369,8 +2445,10 @@ def from_dict(data):
|
||||
spec = SpecfileV1.load(data)
|
||||
elif int(data["spec"]["_meta"]["version"]) == 2:
|
||||
spec = SpecfileV2.load(data)
|
||||
else:
|
||||
elif int(data["spec"]["_meta"]["version"]) == 3:
|
||||
spec = SpecfileV3.load(data)
|
||||
else:
|
||||
spec = SpecfileV4.load(data)
|
||||
|
||||
# Any git version should
|
||||
for s in spec.traverse():
|
||||
@@ -2519,6 +2597,7 @@ def _concretize_helper(self, concretizer, presets=None, visited=None):
|
||||
def _replace_with(self, concrete):
|
||||
"""Replace this virtual spec with a concrete spec."""
|
||||
assert self.virtual
|
||||
virtuals = (self.name,)
|
||||
for dep_spec in itertools.chain.from_iterable(self._dependents.values()):
|
||||
dependent = dep_spec.parent
|
||||
deptypes = dep_spec.deptypes
|
||||
@@ -2529,7 +2608,11 @@ def _replace_with(self, concrete):
|
||||
|
||||
# add the replacement, unless it is already a dep of dependent.
|
||||
if concrete.name not in dependent._dependencies:
|
||||
dependent._add_dependency(concrete, deptypes=deptypes)
|
||||
dependent._add_dependency(concrete, deptypes=deptypes, virtuals=virtuals)
|
||||
else:
|
||||
dependent.edges_to_dependencies(name=concrete.name)[0].update_virtuals(
|
||||
virtuals=virtuals
|
||||
)
|
||||
|
||||
def _expand_virtual_packages(self, concretizer):
|
||||
"""Find virtual packages in this spec, replace them with providers,
|
||||
@@ -3170,7 +3253,9 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
|
||||
|
||||
# If it's a virtual dependency, try to find an existing
|
||||
# provider in the spec, and merge that.
|
||||
virtuals = ()
|
||||
if spack.repo.path.is_virtual_safe(dep.name):
|
||||
virtuals = (dep.name,)
|
||||
visited.add(dep.name)
|
||||
provider = self._find_provider(dep, provider_index)
|
||||
if provider:
|
||||
@@ -3226,7 +3311,7 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
|
||||
# Add merged spec to my deps and recurse
|
||||
spec_dependency = spec_deps[dep.name]
|
||||
if dep.name not in self._dependencies:
|
||||
self._add_dependency(spec_dependency, deptypes=dependency.type)
|
||||
self._add_dependency(spec_dependency, deptypes=dependency.type, virtuals=virtuals)
|
||||
|
||||
changed |= spec_dependency._normalize_helper(visited, spec_deps, provider_index, tests)
|
||||
return changed
|
||||
@@ -3563,15 +3648,20 @@ def _constrain_dependencies(self, other):
|
||||
changed |= edges_from_name[0].update_deptypes(
|
||||
other._dependencies[name][0].deptypes
|
||||
)
|
||||
changed |= edges_from_name[0].update_virtuals(
|
||||
other._dependencies[name][0].virtuals
|
||||
)
|
||||
|
||||
# Update with additional constraints from other spec
|
||||
# operate on direct dependencies only, because a concrete dep
|
||||
# represented by hash may have structure that needs to be preserved
|
||||
for name in other.direct_dep_difference(self):
|
||||
dep_spec_copy = other._get_dependency(name)
|
||||
dep_copy = dep_spec_copy.spec
|
||||
deptypes = dep_spec_copy.deptypes
|
||||
self._add_dependency(dep_copy.copy(), deptypes=deptypes)
|
||||
self._add_dependency(
|
||||
dep_spec_copy.spec.copy(),
|
||||
deptypes=dep_spec_copy.deptypes,
|
||||
virtuals=dep_spec_copy.virtuals,
|
||||
)
|
||||
changed = True
|
||||
|
||||
return changed
|
||||
@@ -3955,7 +4045,7 @@ def spid(spec):
|
||||
new_specs[spid(edge.spec)] = edge.spec.copy(deps=False)
|
||||
|
||||
new_specs[spid(edge.parent)].add_dependency_edge(
|
||||
new_specs[spid(edge.spec)], deptypes=edge.deptypes
|
||||
new_specs[spid(edge.spec)], deptypes=edge.deptypes, virtuals=edge.virtuals
|
||||
)
|
||||
|
||||
def copy(self, deps=True, **kwargs):
|
||||
@@ -4323,7 +4413,7 @@ def write_attribute(spec, attribute, color):
|
||||
|
||||
if callable(current):
|
||||
raise SpecFormatStringError("Attempted to format callable object")
|
||||
if not current:
|
||||
if current is None:
|
||||
# We're not printing anything
|
||||
return
|
||||
|
||||
@@ -4625,12 +4715,16 @@ def from_self(name, transitive):
|
||||
if name in self_nodes:
|
||||
for edge in self[name].edges_to_dependencies():
|
||||
dep_name = deps_to_replace.get(edge.spec, edge.spec).name
|
||||
nodes[name].add_dependency_edge(nodes[dep_name], deptypes=edge.deptypes)
|
||||
nodes[name].add_dependency_edge(
|
||||
nodes[dep_name], deptypes=edge.deptypes, virtuals=edge.virtuals
|
||||
)
|
||||
if any(dep not in self_nodes for dep in self[name]._dependencies):
|
||||
nodes[name].build_spec = self[name].build_spec
|
||||
else:
|
||||
for edge in other[name].edges_to_dependencies():
|
||||
nodes[name].add_dependency_edge(nodes[edge.spec.name], deptypes=edge.deptypes)
|
||||
nodes[name].add_dependency_edge(
|
||||
nodes[edge.spec.name], deptypes=edge.deptypes, virtuals=edge.virtuals
|
||||
)
|
||||
if any(dep not in other_nodes for dep in other[name]._dependencies):
|
||||
nodes[name].build_spec = other[name].build_spec
|
||||
|
||||
@@ -4720,11 +4814,40 @@ def merge_abstract_anonymous_specs(*abstract_specs: Spec):
|
||||
# Update with additional constraints from other spec
|
||||
for name in current_spec_constraint.direct_dep_difference(merged_spec):
|
||||
edge = next(iter(current_spec_constraint.edges_to_dependencies(name)))
|
||||
merged_spec._add_dependency(edge.spec.copy(), deptypes=edge.deptypes)
|
||||
merged_spec._add_dependency(
|
||||
edge.spec.copy(), deptypes=edge.deptypes, virtuals=edge.virtuals
|
||||
)
|
||||
|
||||
return merged_spec
|
||||
|
||||
|
||||
def reconstruct_virtuals_on_edges(spec):
|
||||
"""Reconstruct virtuals on edges. Used to read from old DB and reindex.
|
||||
|
||||
Args:
|
||||
spec: spec on which we want to reconstruct virtuals
|
||||
"""
|
||||
# Collect all possible virtuals
|
||||
possible_virtuals = set()
|
||||
for node in spec.traverse():
|
||||
try:
|
||||
possible_virtuals.update({x for x in node.package.dependencies if Spec(x).virtual})
|
||||
except Exception as e:
|
||||
warnings.warn(f"cannot reconstruct virtual dependencies on package {node.name}: {e}")
|
||||
continue
|
||||
|
||||
# Assume all incoming edges to provider are marked with virtuals=
|
||||
for vspec in possible_virtuals:
|
||||
try:
|
||||
provider = spec[vspec]
|
||||
except KeyError:
|
||||
# Virtual not in the DAG
|
||||
continue
|
||||
|
||||
for edge in provider.edges_from_dependents():
|
||||
edge.update_virtuals([vspec])
|
||||
|
||||
|
||||
class SpecfileReaderBase:
|
||||
@classmethod
|
||||
def from_node_dict(cls, node):
|
||||
@@ -4808,7 +4931,7 @@ def _load(cls, data):
|
||||
|
||||
# Pass 0: Determine hash type
|
||||
for node in nodes:
|
||||
for _, _, _, dhash_type in cls.dependencies_from_node_dict(node):
|
||||
for _, _, _, dhash_type, _ in cls.dependencies_from_node_dict(node):
|
||||
any_deps = True
|
||||
if dhash_type:
|
||||
hash_type = dhash_type
|
||||
@@ -4839,8 +4962,10 @@ def _load(cls, data):
|
||||
# Pass 2: Finish construction of all DAG edges (including build specs)
|
||||
for node_hash, node in hash_dict.items():
|
||||
node_spec = node["node_spec"]
|
||||
for _, dhash, dtypes, _ in cls.dependencies_from_node_dict(node):
|
||||
node_spec._add_dependency(hash_dict[dhash]["node_spec"], deptypes=dtypes)
|
||||
for _, dhash, dtypes, _, virtuals in cls.dependencies_from_node_dict(node):
|
||||
node_spec._add_dependency(
|
||||
hash_dict[dhash]["node_spec"], deptypes=dtypes, virtuals=virtuals
|
||||
)
|
||||
if "build_spec" in node.keys():
|
||||
_, bhash, _ = cls.build_spec_from_node_dict(node, hash_type=hash_type)
|
||||
node_spec._build_spec = hash_dict[bhash]["node_spec"]
|
||||
@@ -4874,9 +4999,10 @@ def load(cls, data):
|
||||
for node in nodes:
|
||||
# get dependency dict from the node.
|
||||
name, data = cls.name_and_data(node)
|
||||
for dname, _, dtypes, _ in cls.dependencies_from_node_dict(data):
|
||||
deps[name]._add_dependency(deps[dname], deptypes=dtypes)
|
||||
for dname, _, dtypes, _, virtuals in cls.dependencies_from_node_dict(data):
|
||||
deps[name]._add_dependency(deps[dname], deptypes=dtypes, virtuals=virtuals)
|
||||
|
||||
reconstruct_virtuals_on_edges(result)
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
@@ -4905,18 +5031,20 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
|
||||
if h.name in elt:
|
||||
dep_hash, deptypes = elt[h.name], elt["type"]
|
||||
hash_type = h.name
|
||||
virtuals = []
|
||||
break
|
||||
else: # We never determined a hash type...
|
||||
raise spack.error.SpecError("Couldn't parse dependency spec.")
|
||||
else:
|
||||
raise spack.error.SpecError("Couldn't parse dependency types in spec.")
|
||||
yield dep_name, dep_hash, list(deptypes), hash_type
|
||||
yield dep_name, dep_hash, list(deptypes), hash_type, list(virtuals)
|
||||
|
||||
|
||||
class SpecfileV2(SpecfileReaderBase):
|
||||
@classmethod
|
||||
def load(cls, data):
|
||||
result = cls._load(data)
|
||||
reconstruct_virtuals_on_edges(result)
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
@@ -4950,7 +5078,7 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
|
||||
raise spack.error.SpecError("Couldn't parse dependency spec.")
|
||||
else:
|
||||
raise spack.error.SpecError("Couldn't parse dependency types in spec.")
|
||||
result.append((dep_name, dep_hash, list(deptypes), hash_type))
|
||||
result.append((dep_name, dep_hash, list(deptypes), hash_type, list(virtuals)))
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
@@ -4970,6 +5098,20 @@ class SpecfileV3(SpecfileV2):
|
||||
pass
|
||||
|
||||
|
||||
class SpecfileV4(SpecfileV2):
|
||||
@classmethod
|
||||
def extract_info_from_dep(cls, elt, hash):
|
||||
dep_hash = elt[hash.name]
|
||||
deptypes = elt["parameters"]["deptypes"]
|
||||
hash_type = hash.name
|
||||
virtuals = elt["parameters"]["virtuals"]
|
||||
return dep_hash, deptypes, hash_type, virtuals
|
||||
|
||||
@classmethod
|
||||
def load(cls, data):
|
||||
return cls._load(data)
|
||||
|
||||
|
||||
class LazySpecCache(collections.defaultdict):
|
||||
"""Cache for Specs that uses a spec_like as key, and computes lazily
|
||||
the corresponding value ``Spec(spec_like``.
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import itertools
|
||||
import textwrap
|
||||
from typing import List
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
import llnl.util.lang
|
||||
|
||||
@@ -66,17 +66,17 @@ def to_dict(self):
|
||||
return dict(d)
|
||||
|
||||
|
||||
def make_environment(dirs=None):
|
||||
"""Returns an configured environment for template rendering."""
|
||||
@llnl.util.lang.memoized
|
||||
def make_environment(dirs: Optional[Tuple[str, ...]] = None):
|
||||
"""Returns a configured environment for template rendering."""
|
||||
# Import at this scope to avoid slowing Spack startup down
|
||||
import jinja2
|
||||
|
||||
if dirs is None:
|
||||
# Default directories where to search for templates
|
||||
builtins = spack.config.get("config:template_dirs", ["$spack/share/spack/templates"])
|
||||
extensions = spack.extensions.get_template_dirs()
|
||||
dirs = [canonicalize_path(d) for d in itertools.chain(builtins, extensions)]
|
||||
|
||||
# avoid importing this at the top level as it's used infrequently and
|
||||
# slows down startup a bit.
|
||||
import jinja2
|
||||
dirs = tuple(canonicalize_path(d) for d in itertools.chain(builtins, extensions))
|
||||
|
||||
# Loader for the templates
|
||||
loader = jinja2.FileSystemLoader(dirs)
|
||||
|
||||
@@ -115,9 +115,6 @@ def default_config(tmpdir, config_directory, monkeypatch, install_mockery_mutabl
|
||||
|
||||
spack.config.config, old_config = cfg, spack.config.config
|
||||
spack.config.config.set("repos", [spack.paths.mock_packages_path])
|
||||
# This is essential, otherwise the cache will create weird side effects
|
||||
# that will compromise subsequent tests if compilers.yaml is modified
|
||||
monkeypatch.setattr(spack.compilers, "_cache_config_file", [])
|
||||
njobs = spack.config.get("config:build_jobs")
|
||||
if not njobs:
|
||||
spack.config.set("config:build_jobs", 4, scope="user")
|
||||
@@ -204,12 +201,12 @@ def test_default_rpaths_create_install_default_layout(mirror_dir):
|
||||
install_cmd("--no-cache", sy_spec.name)
|
||||
|
||||
# Create a buildache
|
||||
buildcache_cmd("push", "-au", "-d", mirror_dir, cspec.name, sy_spec.name)
|
||||
buildcache_cmd("push", "-au", mirror_dir, cspec.name, sy_spec.name)
|
||||
# Test force overwrite create buildcache (-f option)
|
||||
buildcache_cmd("push", "-auf", "-d", mirror_dir, cspec.name)
|
||||
buildcache_cmd("push", "-auf", mirror_dir, cspec.name)
|
||||
|
||||
# Create mirror index
|
||||
buildcache_cmd("update-index", "-d", mirror_dir)
|
||||
buildcache_cmd("update-index", mirror_dir)
|
||||
# List the buildcaches in the mirror
|
||||
buildcache_cmd("list", "-alv")
|
||||
|
||||
@@ -217,13 +214,13 @@ def test_default_rpaths_create_install_default_layout(mirror_dir):
|
||||
uninstall_cmd("-y", "--dependents", gspec.name)
|
||||
|
||||
# Test installing from build caches
|
||||
buildcache_cmd("install", "-au", cspec.name, sy_spec.name)
|
||||
buildcache_cmd("install", "-u", cspec.name, sy_spec.name)
|
||||
|
||||
# This gives warning that spec is already installed
|
||||
buildcache_cmd("install", "-au", cspec.name)
|
||||
buildcache_cmd("install", "-u", cspec.name)
|
||||
|
||||
# Test overwrite install
|
||||
buildcache_cmd("install", "-afu", cspec.name)
|
||||
buildcache_cmd("install", "-fu", cspec.name)
|
||||
|
||||
buildcache_cmd("keys", "-f")
|
||||
buildcache_cmd("list")
|
||||
@@ -249,35 +246,10 @@ def test_default_rpaths_install_nondefault_layout(mirror_dir):
|
||||
|
||||
# Install some packages with dependent packages
|
||||
# test install in non-default install path scheme
|
||||
buildcache_cmd("install", "-au", cspec.name, sy_spec.name)
|
||||
buildcache_cmd("install", "-u", cspec.name, sy_spec.name)
|
||||
|
||||
# Test force install in non-default install path scheme
|
||||
buildcache_cmd("install", "-auf", cspec.name)
|
||||
|
||||
|
||||
@pytest.mark.requires_executables(*args)
|
||||
@pytest.mark.maybeslow
|
||||
@pytest.mark.nomockstage
|
||||
@pytest.mark.usefixtures("default_config", "cache_directory", "install_dir_default_layout")
|
||||
def test_relative_rpaths_create_default_layout(mirror_dir):
|
||||
"""
|
||||
Test the creation and installation of buildcaches with relative
|
||||
rpaths into the default directory layout scheme.
|
||||
"""
|
||||
|
||||
gspec, cspec = Spec("garply").concretized(), Spec("corge").concretized()
|
||||
|
||||
# Install 'corge' without using a cache
|
||||
install_cmd("--no-cache", cspec.name)
|
||||
|
||||
# Create build cache with relative rpaths
|
||||
buildcache_cmd("push", "-aur", "-d", mirror_dir, cspec.name)
|
||||
|
||||
# Create mirror index
|
||||
buildcache_cmd("update-index", "-d", mirror_dir)
|
||||
|
||||
# Uninstall the package and deps
|
||||
uninstall_cmd("-y", "--dependents", gspec.name)
|
||||
buildcache_cmd("install", "-uf", cspec.name)
|
||||
|
||||
|
||||
@pytest.mark.requires_executables(*args)
|
||||
@@ -294,19 +266,19 @@ def test_relative_rpaths_install_default_layout(mirror_dir):
|
||||
gspec, cspec = Spec("garply").concretized(), Spec("corge").concretized()
|
||||
|
||||
# Install buildcache created with relativized rpaths
|
||||
buildcache_cmd("install", "-auf", cspec.name)
|
||||
buildcache_cmd("install", "-uf", cspec.name)
|
||||
|
||||
# This gives warning that spec is already installed
|
||||
buildcache_cmd("install", "-auf", cspec.name)
|
||||
buildcache_cmd("install", "-uf", cspec.name)
|
||||
|
||||
# Uninstall the package and deps
|
||||
uninstall_cmd("-y", "--dependents", gspec.name)
|
||||
|
||||
# Install build cache
|
||||
buildcache_cmd("install", "-auf", cspec.name)
|
||||
buildcache_cmd("install", "-uf", cspec.name)
|
||||
|
||||
# Test overwrite install
|
||||
buildcache_cmd("install", "-auf", cspec.name)
|
||||
buildcache_cmd("install", "-uf", cspec.name)
|
||||
|
||||
|
||||
@pytest.mark.requires_executables(*args)
|
||||
@@ -323,7 +295,7 @@ def test_relative_rpaths_install_nondefault(mirror_dir):
|
||||
cspec = Spec("corge").concretized()
|
||||
|
||||
# Test install in non-default install path scheme and relative path
|
||||
buildcache_cmd("install", "-auf", cspec.name)
|
||||
buildcache_cmd("install", "-uf", cspec.name)
|
||||
|
||||
|
||||
def test_push_and_fetch_keys(mock_gnupghome):
|
||||
@@ -404,7 +376,7 @@ def test_spec_needs_rebuild(monkeypatch, tmpdir):
|
||||
install_cmd(s.name)
|
||||
|
||||
# Put installed package in the buildcache
|
||||
buildcache_cmd("push", "-u", "-a", "-d", mirror_dir.strpath, s.name)
|
||||
buildcache_cmd("push", "-u", "-a", mirror_dir.strpath, s.name)
|
||||
|
||||
rebuild = bindist.needs_rebuild(s, mirror_url)
|
||||
|
||||
@@ -433,8 +405,8 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
|
||||
install_cmd("--no-cache", s.name)
|
||||
|
||||
# Create a buildcache and update index
|
||||
buildcache_cmd("push", "-uad", mirror_dir.strpath, s.name)
|
||||
buildcache_cmd("update-index", "-d", mirror_dir.strpath)
|
||||
buildcache_cmd("push", "-ua", mirror_dir.strpath, s.name)
|
||||
buildcache_cmd("update-index", mirror_dir.strpath)
|
||||
|
||||
# Check package and dependency in buildcache
|
||||
cache_list = buildcache_cmd("list", "--allarch")
|
||||
@@ -446,7 +418,7 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
|
||||
os.remove(*libelf_files)
|
||||
|
||||
# Update index
|
||||
buildcache_cmd("update-index", "-d", mirror_dir.strpath)
|
||||
buildcache_cmd("update-index", mirror_dir.strpath)
|
||||
|
||||
with spack.config.override("config:binary_index_ttl", 0):
|
||||
# Check dependency not in buildcache
|
||||
@@ -522,10 +494,10 @@ def test_update_sbang(tmpdir, test_mirror):
|
||||
install_cmd("--no-cache", old_spec.name)
|
||||
|
||||
# Create a buildcache with the installed spec.
|
||||
buildcache_cmd("push", "-u", "-a", "-d", mirror_dir, old_spec_hash_str)
|
||||
buildcache_cmd("push", "-u", "-a", mirror_dir, old_spec_hash_str)
|
||||
|
||||
# Need to force an update of the buildcache index
|
||||
buildcache_cmd("update-index", "-d", mirror_dir)
|
||||
buildcache_cmd("update-index", mirror_dir)
|
||||
|
||||
# Uninstall the original package.
|
||||
uninstall_cmd("-y", old_spec_hash_str)
|
||||
@@ -541,7 +513,7 @@ def test_update_sbang(tmpdir, test_mirror):
|
||||
assert new_spec.dag_hash() == old_spec.dag_hash()
|
||||
|
||||
# Install package from buildcache
|
||||
buildcache_cmd("install", "-a", "-u", "-f", new_spec.name)
|
||||
buildcache_cmd("install", "-u", "-f", new_spec.name)
|
||||
|
||||
# Continue blowing away caches
|
||||
bindist.clear_spec_cache()
|
||||
|
||||
@@ -3,8 +3,6 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
import spack.cmd.create
|
||||
@@ -12,8 +10,6 @@
|
||||
import spack.util.executable
|
||||
import spack.util.url as url_util
|
||||
|
||||
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
|
||||
|
||||
|
||||
@pytest.fixture(
|
||||
scope="function",
|
||||
|
||||
@@ -291,7 +291,7 @@ def make_build_job(name, deps, stage, use_artifact_buildcache, optimize, use_dep
|
||||
def make_rebuild_index_job(use_artifact_buildcache, optimize, use_dependencies):
|
||||
result = {
|
||||
"stage": "stage-rebuild-index",
|
||||
"script": "spack buildcache update-index --mirror-url s3://mirror",
|
||||
"script": "spack buildcache update-index s3://mirror",
|
||||
"tags": ["tag-0", "tag-1"],
|
||||
"image": {"name": "spack/centos7", "entrypoint": [""]},
|
||||
"after_script": ['rm -rf "./spack"'],
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import pickle
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -39,7 +40,10 @@ def test_dump(tmpdir):
|
||||
with tmpdir.as_cwd():
|
||||
build_env("--dump", _out_file, "zlib")
|
||||
with open(_out_file) as f:
|
||||
assert any(line.startswith("PATH=") for line in f.readlines())
|
||||
if sys.platform == "win32":
|
||||
assert any(line.startswith('set "PATH=') for line in f.readlines())
|
||||
else:
|
||||
assert any(line.startswith("PATH=") for line in f.readlines())
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("config", "mock_packages", "working_env")
|
||||
|
||||
@@ -85,7 +85,7 @@ def tests_buildcache_create(install_mockery, mock_fetch, monkeypatch, tmpdir):
|
||||
pkg = "trivial-install-test-package"
|
||||
install(pkg)
|
||||
|
||||
buildcache("push", "-d", str(tmpdir), "--unsigned", pkg)
|
||||
buildcache("push", "--unsigned", str(tmpdir), pkg)
|
||||
|
||||
spec = Spec(pkg).concretized()
|
||||
tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
|
||||
@@ -105,7 +105,7 @@ def tests_buildcache_create_env(
|
||||
add(pkg)
|
||||
install()
|
||||
|
||||
buildcache("push", "-d", str(tmpdir), "--unsigned")
|
||||
buildcache("push", "--unsigned", str(tmpdir))
|
||||
|
||||
spec = Spec(pkg).concretized()
|
||||
tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
|
||||
@@ -118,7 +118,7 @@ def test_buildcache_create_fails_on_noargs(tmpdir):
|
||||
"""Ensure that buildcache create fails when given no args or
|
||||
environment."""
|
||||
with pytest.raises(spack.main.SpackCommandError):
|
||||
buildcache("push", "-d", str(tmpdir), "--unsigned")
|
||||
buildcache("push", "--unsigned", str(tmpdir))
|
||||
|
||||
|
||||
def test_buildcache_create_fail_on_perm_denied(install_mockery, mock_fetch, monkeypatch, tmpdir):
|
||||
@@ -127,7 +127,7 @@ def test_buildcache_create_fail_on_perm_denied(install_mockery, mock_fetch, monk
|
||||
|
||||
tmpdir.chmod(0)
|
||||
with pytest.raises(OSError) as error:
|
||||
buildcache("push", "-d", str(tmpdir), "--unsigned", "trivial-install-test-package")
|
||||
buildcache("push", "--unsigned", str(tmpdir), "trivial-install-test-package")
|
||||
assert error.value.errno == errno.EACCES
|
||||
tmpdir.chmod(0o700)
|
||||
|
||||
@@ -159,11 +159,11 @@ def test_update_key_index(
|
||||
# Put installed package in the buildcache, which, because we're signing
|
||||
# it, should result in the public key getting pushed to the buildcache
|
||||
# as well.
|
||||
buildcache("push", "-a", "-d", mirror_dir.strpath, s.name)
|
||||
buildcache("push", "-a", mirror_dir.strpath, s.name)
|
||||
|
||||
# Now make sure that when we pass the "--keys" argument to update-index
|
||||
# it causes the index to get update.
|
||||
buildcache("update-index", "--keys", "-d", mirror_dir.strpath)
|
||||
buildcache("update-index", "--keys", mirror_dir.strpath)
|
||||
|
||||
key_dir_list = os.listdir(os.path.join(mirror_dir.strpath, "build_cache", "_pgp"))
|
||||
|
||||
@@ -213,27 +213,25 @@ def verify_mirror_contents():
|
||||
# Install a package and put it in the buildcache
|
||||
s = Spec(out_env_pkg).concretized()
|
||||
install(s.name)
|
||||
buildcache("push", "-u", "-f", "-a", "--mirror-url", src_mirror_url, s.name)
|
||||
buildcache("push", "-u", "-f", "-a", src_mirror_url, s.name)
|
||||
|
||||
env("create", "test")
|
||||
with ev.read("test"):
|
||||
add(in_env_pkg)
|
||||
install()
|
||||
buildcache("push", "-u", "-f", "-a", "--mirror-url", src_mirror_url, in_env_pkg)
|
||||
buildcache("push", "-u", "-f", "-a", src_mirror_url, in_env_pkg)
|
||||
|
||||
# Now run the spack buildcache sync command with all the various options
|
||||
# for specifying mirrors
|
||||
|
||||
# Use urls to specify mirrors
|
||||
buildcache(
|
||||
"sync", "--src-mirror-url", src_mirror_url, "--dest-mirror-url", dest_mirror_url
|
||||
)
|
||||
buildcache("sync", src_mirror_url, dest_mirror_url)
|
||||
|
||||
verify_mirror_contents()
|
||||
shutil.rmtree(dest_mirror_dir)
|
||||
|
||||
# Use local directory paths to specify fs locations
|
||||
buildcache("sync", "--src-directory", src_mirror_dir, "--dest-directory", dest_mirror_dir)
|
||||
buildcache("sync", src_mirror_dir, dest_mirror_dir)
|
||||
|
||||
verify_mirror_contents()
|
||||
shutil.rmtree(dest_mirror_dir)
|
||||
@@ -242,7 +240,7 @@ def verify_mirror_contents():
|
||||
mirror("add", "src", src_mirror_url)
|
||||
mirror("add", "dest", dest_mirror_url)
|
||||
|
||||
buildcache("sync", "--src-mirror-name", "src", "--dest-mirror-name", "dest")
|
||||
buildcache("sync", "src", "dest")
|
||||
|
||||
verify_mirror_contents()
|
||||
|
||||
@@ -260,7 +258,7 @@ def test_buildcache_create_install(
|
||||
pkg = "trivial-install-test-package"
|
||||
install(pkg)
|
||||
|
||||
buildcache("push", "-d", str(tmpdir), "--unsigned", pkg)
|
||||
buildcache("push", "--unsigned", str(tmpdir), pkg)
|
||||
|
||||
spec = Spec(pkg).concretized()
|
||||
tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
|
||||
@@ -324,12 +322,12 @@ def fake_push(node, push_url, options):
|
||||
|
||||
monkeypatch.setattr(spack.binary_distribution, "push_or_raise", fake_push)
|
||||
|
||||
buildcache_create_args = ["create", "-d", str(tmpdir), "--unsigned"]
|
||||
buildcache_create_args = ["create", "--unsigned"]
|
||||
|
||||
if things_to_install != "":
|
||||
buildcache_create_args.extend(["--only", things_to_install])
|
||||
|
||||
buildcache_create_args.extend([slash_hash])
|
||||
buildcache_create_args.extend([str(tmpdir), slash_hash])
|
||||
|
||||
buildcache(*buildcache_create_args)
|
||||
|
||||
|
||||
@@ -1055,7 +1055,7 @@ def test_ci_nothing_to_rebuild(
|
||||
)
|
||||
|
||||
install_cmd("archive-files")
|
||||
buildcache_cmd("push", "-a", "-f", "-u", "--mirror-url", mirror_url, "archive-files")
|
||||
buildcache_cmd("push", "-a", "-f", "-u", mirror_url, "archive-files")
|
||||
|
||||
filename = str(tmpdir.join("spack.yaml"))
|
||||
with open(filename, "w") as f:
|
||||
@@ -1155,8 +1155,8 @@ def test_ci_generate_mirror_override(
|
||||
second_ci_yaml = str(tmpdir.join(".gitlab-ci-2.yml"))
|
||||
with ev.read("test"):
|
||||
install_cmd()
|
||||
buildcache_cmd("push", "-u", "--mirror-url", mirror_url, "patchelf")
|
||||
buildcache_cmd("update-index", "--mirror-url", mirror_url, output=str)
|
||||
buildcache_cmd("push", "-u", mirror_url, "patchelf")
|
||||
buildcache_cmd("update-index", mirror_url, output=str)
|
||||
|
||||
# This generate should not trigger a rebuild of patchelf, since it's in
|
||||
# the main mirror referenced in the environment.
|
||||
@@ -1297,7 +1297,7 @@ def test_push_mirror_contents(
|
||||
mirror_cmd("rm", "test-ci")
|
||||
|
||||
# Test generating buildcache index while we have bin mirror
|
||||
buildcache_cmd("update-index", "--mirror-url", mirror_url)
|
||||
buildcache_cmd("update-index", mirror_url)
|
||||
index_path = os.path.join(buildcache_path, "index.json")
|
||||
with open(index_path) as idx_fd:
|
||||
index_object = json.load(idx_fd)
|
||||
@@ -1613,7 +1613,7 @@ def test_ci_rebuild_index(
|
||||
ypfd.write(spec_json)
|
||||
|
||||
install_cmd("--add", "--keep-stage", "-f", json_path)
|
||||
buildcache_cmd("push", "-u", "-a", "-f", "--mirror-url", mirror_url, "callpath")
|
||||
buildcache_cmd("push", "-u", "-a", "-f", mirror_url, "callpath")
|
||||
ci_cmd("rebuild-index")
|
||||
|
||||
buildcache_path = os.path.join(mirror_dir.strpath, "build_cache")
|
||||
@@ -1647,7 +1647,7 @@ def test_ci_generate_bootstrap_prune_dag(
|
||||
install_cmd("gcc@=12.2.0%gcc@10.2.1")
|
||||
|
||||
# Put installed compiler in the buildcache
|
||||
buildcache_cmd("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, "gcc@12.2.0%gcc@10.2.1")
|
||||
buildcache_cmd("push", "-u", "-a", "-f", mirror_dir.strpath, "gcc@12.2.0%gcc@10.2.1")
|
||||
|
||||
# Now uninstall the compiler
|
||||
uninstall_cmd("-y", "gcc@12.2.0%gcc@10.2.1")
|
||||
@@ -1662,7 +1662,7 @@ def test_ci_generate_bootstrap_prune_dag(
|
||||
install_cmd("--no-check-signature", "b%gcc@=12.2.0")
|
||||
|
||||
# Put spec built with installed compiler in the buildcache
|
||||
buildcache_cmd("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, "b%gcc@12.2.0")
|
||||
buildcache_cmd("push", "-u", "-a", "-f", mirror_dir.strpath, "b%gcc@12.2.0")
|
||||
|
||||
# Now uninstall the spec
|
||||
uninstall_cmd("-y", "b%gcc@12.2.0")
|
||||
|
||||
@@ -8,8 +8,6 @@
|
||||
|
||||
import pytest
|
||||
|
||||
import llnl.util.filesystem
|
||||
|
||||
import spack.compilers
|
||||
import spack.main
|
||||
import spack.version
|
||||
@@ -18,124 +16,8 @@
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_compiler_version():
|
||||
return "4.5.3"
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def mock_compiler_dir(tmpdir, mock_compiler_version):
|
||||
"""Return a directory containing a fake, but detectable compiler."""
|
||||
|
||||
tmpdir.ensure("bin", dir=True)
|
||||
bin_dir = tmpdir.join("bin")
|
||||
|
||||
gcc_path = bin_dir.join("gcc")
|
||||
gxx_path = bin_dir.join("g++")
|
||||
gfortran_path = bin_dir.join("gfortran")
|
||||
|
||||
gcc_path.write(
|
||||
"""\
|
||||
#!/bin/sh
|
||||
|
||||
for arg in "$@"; do
|
||||
if [ "$arg" = -dumpversion ]; then
|
||||
echo '%s'
|
||||
fi
|
||||
done
|
||||
"""
|
||||
% mock_compiler_version
|
||||
)
|
||||
|
||||
# Create some mock compilers in the temporary directory
|
||||
llnl.util.filesystem.set_executable(str(gcc_path))
|
||||
gcc_path.copy(gxx_path, mode=True)
|
||||
gcc_path.copy(gfortran_path, mode=True)
|
||||
|
||||
return str(tmpdir)
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
sys.platform == "win32",
|
||||
reason="Cannot execute bash \
|
||||
script on Windows",
|
||||
)
|
||||
@pytest.mark.regression("11678,13138")
|
||||
def test_compiler_find_without_paths(no_compilers_yaml, working_env, tmpdir):
|
||||
with tmpdir.as_cwd():
|
||||
with open("gcc", "w") as f:
|
||||
f.write(
|
||||
"""\
|
||||
#!/bin/sh
|
||||
echo "0.0.0"
|
||||
"""
|
||||
)
|
||||
os.chmod("gcc", 0o700)
|
||||
|
||||
os.environ["PATH"] = str(tmpdir)
|
||||
output = compiler("find", "--scope=site")
|
||||
|
||||
assert "gcc" in output
|
||||
|
||||
|
||||
@pytest.mark.regression("17589")
|
||||
def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, tmpdir):
|
||||
with tmpdir.as_cwd():
|
||||
# make a script to emulate apple gcc's version args
|
||||
with open("gcc", "w") as f:
|
||||
f.write(
|
||||
"""\
|
||||
#!/bin/sh
|
||||
if [ "$1" = "-dumpversion" ]; then
|
||||
echo "4.2.1"
|
||||
elif [ "$1" = "--version" ]; then
|
||||
echo "Configured with: --prefix=/dummy"
|
||||
echo "Apple clang version 11.0.0 (clang-1100.0.33.16)"
|
||||
echo "Target: x86_64-apple-darwin18.7.0"
|
||||
echo "Thread model: posix"
|
||||
echo "InstalledDir: /dummy"
|
||||
else
|
||||
echo "clang: error: no input files"
|
||||
fi
|
||||
"""
|
||||
)
|
||||
os.chmod("gcc", 0o700)
|
||||
|
||||
os.environ["PATH"] = str(tmpdir)
|
||||
output = compiler("find", "--scope=site")
|
||||
|
||||
assert "gcc" not in output
|
||||
|
||||
|
||||
def test_compiler_remove(mutable_config, mock_packages):
|
||||
assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
|
||||
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
|
||||
spack.cmd.compiler.compiler_remove(args)
|
||||
assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
sys.platform == "win32",
|
||||
reason="Cannot execute bash \
|
||||
script on Windows",
|
||||
)
|
||||
def test_compiler_add(mutable_config, mock_packages, mock_compiler_dir, mock_compiler_version):
|
||||
# Compilers available by default.
|
||||
old_compilers = set(spack.compilers.all_compiler_specs())
|
||||
|
||||
args = spack.util.pattern.Bunch(
|
||||
all=None, compiler_spec=None, add_paths=[mock_compiler_dir], scope=None
|
||||
)
|
||||
spack.cmd.compiler.compiler_find(args)
|
||||
|
||||
# Ensure new compiler is in there
|
||||
new_compilers = set(spack.compilers.all_compiler_specs())
|
||||
new_compiler = new_compilers - old_compilers
|
||||
assert any(c.version == spack.version.Version(mock_compiler_version) for c in new_compiler)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def clangdir(tmpdir):
|
||||
"""Create a directory with some dummy compiler scripts in it.
|
||||
def compilers_dir(mock_executable):
|
||||
"""Create a directory with some mock compiler scripts in it.
|
||||
|
||||
Scripts are:
|
||||
- clang
|
||||
@@ -145,11 +27,9 @@ def clangdir(tmpdir):
|
||||
- gfortran-8
|
||||
|
||||
"""
|
||||
with tmpdir.as_cwd():
|
||||
with open("clang", "w") as f:
|
||||
f.write(
|
||||
"""\
|
||||
#!/bin/sh
|
||||
clang_path = mock_executable(
|
||||
"clang",
|
||||
output="""
|
||||
if [ "$1" = "--version" ]; then
|
||||
echo "clang version 11.0.0 (clang-1100.0.33.16)"
|
||||
echo "Target: x86_64-apple-darwin18.7.0"
|
||||
@@ -159,12 +39,11 @@ def clangdir(tmpdir):
|
||||
echo "clang: error: no input files"
|
||||
exit 1
|
||||
fi
|
||||
"""
|
||||
)
|
||||
shutil.copy("clang", "clang++")
|
||||
""",
|
||||
)
|
||||
shutil.copy(clang_path, clang_path.parent / "clang++")
|
||||
|
||||
gcc_script = """\
|
||||
#!/bin/sh
|
||||
gcc_script = """
|
||||
if [ "$1" = "-dumpversion" ]; then
|
||||
echo "8"
|
||||
elif [ "$1" = "-dumpfullversion" ]; then
|
||||
@@ -178,120 +57,187 @@ def clangdir(tmpdir):
|
||||
exit 1
|
||||
fi
|
||||
"""
|
||||
with open("gcc-8", "w") as f:
|
||||
f.write(gcc_script.format("gcc", "gcc-8"))
|
||||
with open("g++-8", "w") as f:
|
||||
f.write(gcc_script.format("g++", "g++-8"))
|
||||
with open("gfortran-8", "w") as f:
|
||||
f.write(gcc_script.format("GNU Fortran", "gfortran-8"))
|
||||
os.chmod("clang", 0o700)
|
||||
os.chmod("clang++", 0o700)
|
||||
os.chmod("gcc-8", 0o700)
|
||||
os.chmod("g++-8", 0o700)
|
||||
os.chmod("gfortran-8", 0o700)
|
||||
mock_executable("gcc-8", output=gcc_script.format("gcc", "gcc-8"))
|
||||
mock_executable("g++-8", output=gcc_script.format("g++", "g++-8"))
|
||||
mock_executable("gfortran-8", output=gcc_script.format("GNU Fortran", "gfortran-8"))
|
||||
|
||||
yield tmpdir
|
||||
return clang_path.parent
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
sys.platform == "win32",
|
||||
reason="Cannot execute bash \
|
||||
script on Windows",
|
||||
)
|
||||
@pytest.mark.regression("17590")
|
||||
def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, clangdir):
|
||||
"""Ensure that we'll mix compilers with different suffixes when necessary."""
|
||||
os.environ["PATH"] = str(clangdir)
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
|
||||
@pytest.mark.regression("11678,13138")
|
||||
def test_compiler_find_without_paths(no_compilers_yaml, working_env, mock_executable):
|
||||
"""Tests that 'spack compiler find' looks into PATH by default, if no specific path
|
||||
is given.
|
||||
"""
|
||||
gcc_path = mock_executable("gcc", output='echo "0.0.0"')
|
||||
|
||||
os.environ["PATH"] = str(gcc_path.parent)
|
||||
output = compiler("find", "--scope=site")
|
||||
|
||||
assert "clang@=11.0.0" in output
|
||||
assert "gcc@=8.4.0" in output
|
||||
assert "gcc" in output
|
||||
|
||||
|
||||
@pytest.mark.regression("17589")
|
||||
def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, mock_executable):
|
||||
"""Tests that Spack won't mistake Apple's GCC as a "real" GCC, since it's really
|
||||
Clang with a few tweaks.
|
||||
"""
|
||||
gcc_path = mock_executable(
|
||||
"gcc",
|
||||
output="""
|
||||
if [ "$1" = "-dumpversion" ]; then
|
||||
echo "4.2.1"
|
||||
elif [ "$1" = "--version" ]; then
|
||||
echo "Configured with: --prefix=/dummy"
|
||||
echo "Apple clang version 11.0.0 (clang-1100.0.33.16)"
|
||||
echo "Target: x86_64-apple-darwin18.7.0"
|
||||
echo "Thread model: posix"
|
||||
echo "InstalledDir: /dummy"
|
||||
else
|
||||
echo "clang: error: no input files"
|
||||
fi
|
||||
""",
|
||||
)
|
||||
|
||||
os.environ["PATH"] = str(gcc_path.parent)
|
||||
output = compiler("find", "--scope=site")
|
||||
|
||||
assert "gcc" not in output
|
||||
|
||||
|
||||
@pytest.mark.regression("37996")
|
||||
def test_compiler_remove(mutable_config, mock_packages):
|
||||
"""Tests that we can remove a compiler from configuration."""
|
||||
assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
|
||||
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
|
||||
spack.cmd.compiler.compiler_remove(args)
|
||||
assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()
|
||||
|
||||
|
||||
@pytest.mark.regression("37996")
|
||||
def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages):
|
||||
# Duplicate "site" scope into "user" scope
|
||||
site_config = spack.config.get("compilers", scope="site")
|
||||
spack.config.set("compilers", site_config, scope="user")
|
||||
|
||||
assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
|
||||
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
|
||||
spack.cmd.compiler.compiler_remove(args)
|
||||
assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
|
||||
def test_compiler_add(mutable_config, mock_packages, mock_executable):
|
||||
"""Tests that we can add a compiler to configuration."""
|
||||
expected_version = "4.5.3"
|
||||
gcc_path = mock_executable(
|
||||
"gcc",
|
||||
output=f"""\
|
||||
for arg in "$@"; do
|
||||
if [ "$arg" = -dumpversion ]; then
|
||||
echo '{expected_version}'
|
||||
fi
|
||||
done
|
||||
""",
|
||||
)
|
||||
bin_dir = gcc_path.parent
|
||||
root_dir = bin_dir.parent
|
||||
|
||||
compilers_before_find = set(spack.compilers.all_compiler_specs())
|
||||
args = spack.util.pattern.Bunch(
|
||||
all=None, compiler_spec=None, add_paths=[str(root_dir)], scope=None
|
||||
)
|
||||
spack.cmd.compiler.compiler_find(args)
|
||||
compilers_after_find = set(spack.compilers.all_compiler_specs())
|
||||
|
||||
compilers_added_by_find = compilers_after_find - compilers_before_find
|
||||
assert len(compilers_added_by_find) == 1
|
||||
new_compiler = compilers_added_by_find.pop()
|
||||
assert new_compiler.version == spack.version.Version(expected_version)
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
|
||||
@pytest.mark.regression("17590")
|
||||
def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_dir):
|
||||
"""Ensure that we'll mix compilers with different suffixes when necessary."""
|
||||
os.environ["PATH"] = str(compilers_dir)
|
||||
output = compiler("find", "--scope=site")
|
||||
|
||||
assert "clang@11.0.0" in output
|
||||
assert "gcc@8.4.0" in output
|
||||
|
||||
config = spack.compilers.get_compiler_config("site", False)
|
||||
clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")
|
||||
gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")
|
||||
|
||||
gfortran_path = str(clangdir.join("gfortran-8"))
|
||||
gfortran_path = str(compilers_dir / "gfortran-8")
|
||||
|
||||
assert clang["paths"] == {
|
||||
"cc": str(clangdir.join("clang")),
|
||||
"cxx": str(clangdir.join("clang++")),
|
||||
"cc": str(compilers_dir / "clang"),
|
||||
"cxx": str(compilers_dir / "clang++"),
|
||||
# we only auto-detect mixed clang on macos
|
||||
"f77": gfortran_path if sys.platform == "darwin" else None,
|
||||
"fc": gfortran_path if sys.platform == "darwin" else None,
|
||||
}
|
||||
|
||||
assert gcc["paths"] == {
|
||||
"cc": str(clangdir.join("gcc-8")),
|
||||
"cxx": str(clangdir.join("g++-8")),
|
||||
"cc": str(compilers_dir / "gcc-8"),
|
||||
"cxx": str(compilers_dir / "g++-8"),
|
||||
"f77": gfortran_path,
|
||||
"fc": gfortran_path,
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
sys.platform == "win32",
|
||||
reason="Cannot execute bash \
|
||||
script on Windows",
|
||||
)
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
|
||||
@pytest.mark.regression("17590")
|
||||
def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, clangdir):
|
||||
def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compilers_dir):
|
||||
"""Ensure that we'll pick 'clang' over 'clang-gpu' when there is a choice."""
|
||||
with clangdir.as_cwd():
|
||||
shutil.copy("clang", "clang-gpu")
|
||||
shutil.copy("clang++", "clang++-gpu")
|
||||
os.chmod("clang-gpu", 0o700)
|
||||
os.chmod("clang++-gpu", 0o700)
|
||||
clang_path = compilers_dir / "clang"
|
||||
shutil.copy(clang_path, clang_path.parent / "clang-gpu")
|
||||
shutil.copy(clang_path, clang_path.parent / "clang++-gpu")
|
||||
|
||||
os.environ["PATH"] = str(clangdir)
|
||||
os.environ["PATH"] = str(compilers_dir)
|
||||
output = compiler("find", "--scope=site")
|
||||
|
||||
assert "clang@=11.0.0" in output
|
||||
assert "gcc@=8.4.0" in output
|
||||
assert "clang@11.0.0" in output
|
||||
assert "gcc@8.4.0" in output
|
||||
|
||||
config = spack.compilers.get_compiler_config("site", False)
|
||||
clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")
|
||||
|
||||
assert clang["paths"]["cc"] == str(clangdir.join("clang"))
|
||||
assert clang["paths"]["cxx"] == str(clangdir.join("clang++"))
|
||||
assert clang["paths"]["cc"] == str(compilers_dir / "clang")
|
||||
assert clang["paths"]["cxx"] == str(compilers_dir / "clang++")
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
sys.platform == "win32",
|
||||
reason="Cannot execute bash \
|
||||
script on Windows",
|
||||
)
|
||||
def test_compiler_find_path_order(no_compilers_yaml, working_env, clangdir):
|
||||
"""Ensure that we find compilers that come first in the PATH first"""
|
||||
|
||||
with clangdir.as_cwd():
|
||||
os.mkdir("first_in_path")
|
||||
shutil.copy("gcc-8", "first_in_path/gcc-8")
|
||||
shutil.copy("g++-8", "first_in_path/g++-8")
|
||||
shutil.copy("gfortran-8", "first_in_path/gfortran-8")
|
||||
|
||||
# the first_in_path folder should be searched first
|
||||
os.environ["PATH"] = "{0}:{1}".format(str(clangdir.join("first_in_path")), str(clangdir))
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
|
||||
def test_compiler_find_path_order(no_compilers_yaml, working_env, compilers_dir):
|
||||
"""Ensure that we look for compilers in the same order as PATH, when there are duplicates"""
|
||||
new_dir = compilers_dir / "first_in_path"
|
||||
new_dir.mkdir()
|
||||
for name in ("gcc-8", "g++-8", "gfortran-8"):
|
||||
shutil.copy(compilers_dir / name, new_dir / name)
|
||||
# Set PATH to have the new folder searched first
|
||||
os.environ["PATH"] = "{}:{}".format(str(new_dir), str(compilers_dir))
|
||||
|
||||
compiler("find", "--scope=site")
|
||||
|
||||
config = spack.compilers.get_compiler_config("site", False)
|
||||
|
||||
gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")
|
||||
|
||||
assert gcc["paths"] == {
|
||||
"cc": str(clangdir.join("first_in_path", "gcc-8")),
|
||||
"cxx": str(clangdir.join("first_in_path", "g++-8")),
|
||||
"f77": str(clangdir.join("first_in_path", "gfortran-8")),
|
||||
"fc": str(clangdir.join("first_in_path", "gfortran-8")),
|
||||
"cc": str(new_dir / "gcc-8"),
|
||||
"cxx": str(new_dir / "g++-8"),
|
||||
"f77": str(new_dir / "gfortran-8"),
|
||||
"fc": str(new_dir / "gfortran-8"),
|
||||
}
|
||||
|
||||
|
||||
def test_compiler_list_empty(no_compilers_yaml, working_env, clangdir):
|
||||
# Spack should not automatically search for compilers when listing them and none
|
||||
# are available. And when stdout is not a tty like in tests, there should be no
|
||||
# output and no error exit code.
|
||||
os.environ["PATH"] = str(clangdir)
|
||||
def test_compiler_list_empty(no_compilers_yaml, working_env, compilers_dir):
|
||||
"""Spack should not automatically search for compilers when listing them and none are
|
||||
available. And when stdout is not a tty like in tests, there should be no output and
|
||||
no error exit code.
|
||||
"""
|
||||
os.environ["PATH"] = str(compilers_dir)
|
||||
out = compiler("list")
|
||||
assert not out
|
||||
assert compiler.returncode == 0
|
||||
|
||||
@@ -32,7 +32,7 @@ def check_develop(self, env, spec, path=None):
|
||||
assert dev_specs_entry["spec"] == str(spec)
|
||||
|
||||
# check yaml representation
|
||||
yaml = ev.config_dict(env.manifest)
|
||||
yaml = env.manifest[ev.TOP_LEVEL_KEY]
|
||||
assert spec.name in yaml["develop"]
|
||||
yaml_entry = yaml["develop"][spec.name]
|
||||
assert yaml_entry["spec"] == str(spec)
|
||||
|
||||
@@ -390,6 +390,19 @@ def test_remove_after_concretize():
|
||||
assert not any(s.name == "mpileaks" for s in env_specs)
|
||||
|
||||
|
||||
def test_remove_before_concretize():
|
||||
e = ev.create("test")
|
||||
e.unify = True
|
||||
|
||||
e.add("mpileaks")
|
||||
e.concretize()
|
||||
|
||||
e.remove("mpileaks")
|
||||
e.concretize()
|
||||
|
||||
assert not list(e.concretized_specs())
|
||||
|
||||
|
||||
def test_remove_command():
|
||||
env("create", "test")
|
||||
assert "test" in env("list")
|
||||
@@ -2389,6 +2402,21 @@ def test_env_activate_default_view_root_unconditional(mutable_mock_env_path):
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.regression("38510")
|
||||
def test_concretize_separately_abstract_hash(install_mockery, mock_fetch):
|
||||
"""Check that a root can have no name if it has a hash."""
|
||||
s = Spec("trivial-install-test-package").concretized()
|
||||
install(str(s))
|
||||
|
||||
e = ev.create("test")
|
||||
e.unify = False
|
||||
|
||||
e.add(f"/{s.dag_hash()}")
|
||||
e.concretize()
|
||||
|
||||
assert list(e.concretized_specs()) == [(Spec(f"/{s.dag_hash()}"), s)]
|
||||
|
||||
|
||||
def test_concretize_user_specs_together():
|
||||
e = ev.create("coconcretization")
|
||||
e.unify = True
|
||||
@@ -2675,7 +2703,7 @@ def test_modules_relative_to_views(environment_from_manifest, install_mockery, m
|
||||
|
||||
spec = e.specs_by_hash[e.concretized_order[0]]
|
||||
view_prefix = e.default_view.get_projection_for_spec(spec)
|
||||
modules_glob = "%s/modules/**/*" % e.path
|
||||
modules_glob = "%s/modules/**/*/*" % e.path
|
||||
modules = glob.glob(modules_glob)
|
||||
assert len(modules) == 1
|
||||
module = modules[0]
|
||||
@@ -2711,12 +2739,12 @@ def test_multiple_modules_post_env_hook(environment_from_manifest, install_mocke
|
||||
|
||||
spec = e.specs_by_hash[e.concretized_order[0]]
|
||||
view_prefix = e.default_view.get_projection_for_spec(spec)
|
||||
modules_glob = "%s/modules/**/*" % e.path
|
||||
modules_glob = "%s/modules/**/*/*" % e.path
|
||||
modules = glob.glob(modules_glob)
|
||||
assert len(modules) == 1
|
||||
module = modules[0]
|
||||
|
||||
full_modules_glob = "%s/full_modules/**/*" % e.path
|
||||
full_modules_glob = "%s/full_modules/**/*/*" % e.path
|
||||
full_modules = glob.glob(full_modules_glob)
|
||||
assert len(full_modules) == 1
|
||||
full_module = full_modules[0]
|
||||
@@ -3299,3 +3327,22 @@ def test_environment_created_in_users_location(mutable_config, tmpdir):
|
||||
assert dir_name in out
|
||||
assert env_dir in ev.root(dir_name)
|
||||
assert os.path.isdir(os.path.join(env_dir, dir_name))
|
||||
|
||||
|
||||
def test_environment_created_from_lockfile_has_view(mock_packages, tmpdir):
|
||||
"""When an env is created from a lockfile, a view should be generated for it"""
|
||||
env_a = str(tmpdir.join("a"))
|
||||
env_b = str(tmpdir.join("b"))
|
||||
|
||||
# Create an environment and install a package in it
|
||||
env("create", "-d", env_a)
|
||||
with ev.Environment(env_a):
|
||||
add("libelf")
|
||||
install("--fake")
|
||||
|
||||
# Create another environment from the lockfile of the first environment
|
||||
env("create", "-d", env_b, os.path.join(env_a, "spack.lock"))
|
||||
|
||||
# Make sure the view was created
|
||||
with ev.Environment(env_b) as e:
|
||||
assert os.path.isdir(e.view_path_default)
|
||||
|
||||
@@ -44,9 +44,8 @@ def define_plat_exe(exe):
|
||||
|
||||
def test_find_external_single_package(mock_executable, executables_found, _platform_executables):
|
||||
pkgs_to_check = [spack.repo.path.get_pkg_class("cmake")]
|
||||
executables_found(
|
||||
{mock_executable("cmake", output="echo cmake version 1.foo"): define_plat_exe("cmake")}
|
||||
)
|
||||
cmake_path = mock_executable("cmake", output="echo cmake version 1.foo")
|
||||
executables_found({str(cmake_path): define_plat_exe("cmake")})
|
||||
|
||||
pkg_to_entries = spack.detection.by_executable(pkgs_to_check)
|
||||
|
||||
@@ -71,7 +70,7 @@ def test_find_external_two_instances_same_package(
|
||||
"cmake", output="echo cmake version 3.17.2", subdir=("base2", "bin")
|
||||
)
|
||||
cmake_exe = define_plat_exe("cmake")
|
||||
executables_found({cmake_path1: cmake_exe, cmake_path2: cmake_exe})
|
||||
executables_found({str(cmake_path1): cmake_exe, str(cmake_path2): cmake_exe})
|
||||
|
||||
pkg_to_entries = spack.detection.by_executable(pkgs_to_check)
|
||||
|
||||
@@ -107,7 +106,7 @@ def test_get_executables(working_env, mock_executable):
|
||||
cmake_path1 = mock_executable("cmake", output="echo cmake version 1.foo")
|
||||
path_to_exe = spack.detection.executables_in_path([os.path.dirname(cmake_path1)])
|
||||
cmake_exe = define_plat_exe("cmake")
|
||||
assert path_to_exe[cmake_path1] == cmake_exe
|
||||
assert path_to_exe[str(cmake_path1)] == cmake_exe
|
||||
|
||||
|
||||
external = SpackCommand("external")
|
||||
@@ -334,7 +333,7 @@ def test_packages_yaml_format(mock_executable, mutable_config, monkeypatch, _pla
|
||||
assert "extra_attributes" in external_gcc
|
||||
extra_attributes = external_gcc["extra_attributes"]
|
||||
assert "prefix" not in extra_attributes
|
||||
assert extra_attributes["compilers"]["c"] == gcc_exe
|
||||
assert extra_attributes["compilers"]["c"] == str(gcc_exe)
|
||||
|
||||
|
||||
def test_overriding_prefix(mock_executable, mutable_config, monkeypatch, _platform_executables):
|
||||
|
||||
@@ -357,3 +357,18 @@ def test_find_loaded(database, working_env):
|
||||
output = find("--loaded")
|
||||
expected = find()
|
||||
assert output == expected
|
||||
|
||||
|
||||
@pytest.mark.regression("37712")
|
||||
def test_environment_with_version_range_in_compiler_doesnt_fail(tmp_path):
|
||||
"""Tests that having an active environment with a root spec containing a compiler constrained
|
||||
by a version range (i.e. @X.Y rather the single version than @=X.Y) doesn't result in an error
|
||||
when invoking "spack find".
|
||||
"""
|
||||
test_environment = ev.create_in_dir(tmp_path)
|
||||
test_environment.add("zlib %gcc@12.1.0")
|
||||
test_environment.write()
|
||||
|
||||
with test_environment:
|
||||
output = find()
|
||||
assert "zlib%gcc@12.1.0" in output
|
||||
|
||||
@@ -3,16 +3,12 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
from spack.main import SpackCommand, SpackCommandError
|
||||
|
||||
graph = SpackCommand("graph")
|
||||
|
||||
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
|
||||
|
||||
|
||||
@pytest.mark.db
|
||||
@pytest.mark.usefixtures("mock_packages", "database")
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -13,8 +12,6 @@
|
||||
|
||||
info = SpackCommand("info")
|
||||
|
||||
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Not yet implemented on Windows")
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def parser():
|
||||
|
||||
@@ -966,7 +966,7 @@ def test_compiler_bootstrap_from_binary_mirror(
|
||||
install("gcc@=10.2.0")
|
||||
|
||||
# Put installed compiler in the buildcache
|
||||
buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, "gcc@10.2.0")
|
||||
buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, "gcc@10.2.0")
|
||||
|
||||
# Now uninstall the compiler
|
||||
uninstall("-y", "gcc@10.2.0")
|
||||
@@ -1138,7 +1138,7 @@ def install_use_buildcache(opt):
|
||||
|
||||
# Populate the buildcache
|
||||
install(package_name)
|
||||
buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, package_name, dependency_name)
|
||||
buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, package_name, dependency_name)
|
||||
|
||||
# Uninstall the all of the packages for clean slate
|
||||
uninstall("-y", "-a")
|
||||
|
||||
@@ -3,8 +3,6 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
import spack.store
|
||||
@@ -15,8 +13,6 @@
|
||||
install = SpackCommand("install")
|
||||
uninstall = SpackCommand("uninstall")
|
||||
|
||||
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
|
||||
|
||||
|
||||
@pytest.mark.db
|
||||
def test_mark_mode_required(mutable_database):
|
||||
|
||||
@@ -235,7 +235,7 @@ def test_mirror_destroy(
|
||||
|
||||
# Put a binary package in a buildcache
|
||||
install("--no-cache", spec_name)
|
||||
buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, spec_name)
|
||||
buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, spec_name)
|
||||
|
||||
contents = os.listdir(mirror_dir.strpath)
|
||||
assert "build_cache" in contents
|
||||
@@ -245,7 +245,7 @@ def test_mirror_destroy(
|
||||
|
||||
assert not os.path.exists(mirror_dir.strpath)
|
||||
|
||||
buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, spec_name)
|
||||
buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, spec_name)
|
||||
|
||||
contents = os.listdir(mirror_dir.strpath)
|
||||
assert "build_cache" in contents
|
||||
|
||||
@@ -157,7 +157,7 @@ def _parse_types(string):
|
||||
|
||||
|
||||
def test_spec_deptypes_nodes():
|
||||
output = spec("--types", "--cover", "nodes", "dt-diamond")
|
||||
output = spec("--types", "--cover", "nodes", "--no-install-status", "dt-diamond")
|
||||
types = _parse_types(output)
|
||||
|
||||
assert types["dt-diamond"] == [" "]
|
||||
@@ -167,7 +167,7 @@ def test_spec_deptypes_nodes():
|
||||
|
||||
|
||||
def test_spec_deptypes_edges():
|
||||
output = spec("--types", "--cover", "edges", "dt-diamond")
|
||||
output = spec("--types", "--cover", "edges", "--no-install-status", "dt-diamond")
|
||||
types = _parse_types(output)
|
||||
|
||||
assert types["dt-diamond"] == [" "]
|
||||
|
||||
@@ -319,3 +319,17 @@ def test_report_filename_for_cdash(install_mockery_mutable_config, mock_fetch):
|
||||
spack.cmd.common.arguments.sanitize_reporter_options(args)
|
||||
filename = spack.cmd.test.report_filename(args, suite)
|
||||
assert filename != "https://blahblah/submit.php?project=debugging"
|
||||
|
||||
|
||||
def test_test_output_multiple_specs(
|
||||
mock_test_stage, mock_packages, mock_archive, mock_fetch, install_mockery_mutable_config
|
||||
):
|
||||
"""Ensure proper reporting for suite with skipped, failing, and passed tests."""
|
||||
install("test-error", "simple-standalone-test@0.9", "simple-standalone-test@1.0")
|
||||
out = spack_test("run", "test-error", "simple-standalone-test", fail_on_error=False)
|
||||
|
||||
# Note that a spec with passing *and* skipped tests is still considered
|
||||
# to have passed at this level. If you want to see the spec-specific
|
||||
# part result summaries, you'll have to look at the "test-out.txt" files
|
||||
# for each spec.
|
||||
assert "1 failed, 2 passed of 3 specs" in out
|
||||
|
||||
@@ -337,8 +337,6 @@ def test_compiler_flags_differ_identical_compilers(self):
|
||||
|
||||
# Get the compiler that matches the spec (
|
||||
compiler = spack.compilers.compiler_for_spec("clang@=12.2.0", spec.architecture)
|
||||
# Clear cache for compiler config since it has its own cache mechanism outside of config
|
||||
spack.compilers._cache_config_file = []
|
||||
|
||||
# Configure spack to have two identical compilers with different flags
|
||||
default_dict = spack.compilers._to_dict(compiler)
|
||||
@@ -2137,7 +2135,7 @@ def test_compiler_with_custom_non_numeric_version(self, mock_executable):
|
||||
{
|
||||
"compiler": {
|
||||
"spec": "gcc@foo",
|
||||
"paths": {"cc": gcc_path, "cxx": gcc_path, "f77": None, "fc": None},
|
||||
"paths": {"cc": str(gcc_path), "cxx": str(gcc_path), "f77": None, "fc": None},
|
||||
"operating_system": "debian6",
|
||||
"modules": [],
|
||||
}
|
||||
@@ -2172,3 +2170,14 @@ def test_concretization_with_compilers_supporting_target_any(self):
|
||||
with spack.config.override("compilers", compiler_configuration):
|
||||
s = spack.spec.Spec("a").concretized()
|
||||
assert s.satisfies("%gcc@12.1.0")
|
||||
|
||||
@pytest.mark.parametrize("spec_str", ["mpileaks", "mpileaks ^mpich"])
|
||||
def test_virtuals_are_annotated_on_edges(self, spec_str, default_mock_concretization):
|
||||
"""Tests that information on virtuals is annotated on DAG edges"""
|
||||
spec = default_mock_concretization(spec_str)
|
||||
mpi_provider = spec["mpi"].name
|
||||
|
||||
edges = spec.edges_to_dependencies(name=mpi_provider)
|
||||
assert len(edges) == 1 and edges[0].virtuals == ("mpi",)
|
||||
edges = spec.edges_to_dependencies(name="callpath")
|
||||
assert len(edges) == 1 and edges[0].virtuals == ()
|
||||
|
||||
@@ -367,8 +367,11 @@ def test_requirement_adds_multiple_new_versions(
|
||||
def test_preference_adds_new_version(
|
||||
concretize_scope, test_repo, mock_git_version_info, monkeypatch
|
||||
):
|
||||
"""Normally a preference cannot define a new version, but that constraint
|
||||
is ignored if the version is a Git hash-based version.
|
||||
"""
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
pytest.skip("Original concretizer does not enforce this constraint for preferences")
|
||||
|
||||
repo_path, filename, commits = mock_git_version_info
|
||||
monkeypatch.setattr(
|
||||
@@ -391,6 +394,29 @@ def test_preference_adds_new_version(
|
||||
assert not s3.satisfies("@2.3")
|
||||
|
||||
|
||||
def test_external_adds_new_version_that_is_preferred(concretize_scope, test_repo):
|
||||
"""Test that we can use a version, not declared in package recipe, as the
|
||||
preferred version if that version appears in an external spec.
|
||||
"""
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not enforce this constraint for preferences")
|
||||
|
||||
conf_str = """\
|
||||
packages:
|
||||
y:
|
||||
version: ["2.7"]
|
||||
externals:
|
||||
- spec: y@2.7 # Not defined in y
|
||||
prefix: /fake/nonexistent/path/
|
||||
buildable: false
|
||||
"""
|
||||
update_packages_config(conf_str)
|
||||
|
||||
spec = Spec("x").concretized()
|
||||
assert spec["y"].satisfies("@2.7")
|
||||
assert spack.version.Version("2.7") not in spec["y"].package.versions
|
||||
|
||||
|
||||
def test_requirement_is_successfully_applied(concretize_scope, test_repo):
|
||||
"""If a simple requirement can be satisfied, make sure the
|
||||
concretization succeeds and the requirement spec is applied.
|
||||
|
||||
@@ -1669,22 +1669,21 @@ def clear_directive_functions():
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_executable(tmpdir):
|
||||
def mock_executable(tmp_path):
|
||||
"""Factory to create a mock executable in a temporary directory that
|
||||
output a custom string when run.
|
||||
"""
|
||||
import jinja2
|
||||
|
||||
shebang = "#!/bin/sh\n" if sys.platform != "win32" else "@ECHO OFF"
|
||||
|
||||
def _factory(name, output, subdir=("bin",)):
|
||||
f = tmpdir.ensure(*subdir, dir=True).join(name)
|
||||
executable_dir = tmp_path.joinpath(*subdir)
|
||||
executable_dir.mkdir(parents=True, exist_ok=True)
|
||||
executable_path = executable_dir / name
|
||||
if sys.platform == "win32":
|
||||
f += ".bat"
|
||||
t = jinja2.Template("{{ shebang }}{{ output }}\n")
|
||||
f.write(t.render(shebang=shebang, output=output))
|
||||
f.chmod(0o755)
|
||||
return str(f)
|
||||
executable_path = executable_dir / (name + ".bat")
|
||||
executable_path.write_text(f"{ shebang }{ output }\n")
|
||||
executable_path.chmod(0o755)
|
||||
return executable_path
|
||||
|
||||
return _factory
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ lmod:
|
||||
hash_length: 0
|
||||
|
||||
core_compilers:
|
||||
- 'clang@3.3'
|
||||
- 'clang@12.0.0'
|
||||
|
||||
core_specs:
|
||||
- 'mpich@3.0.1'
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
enable:
|
||||
- lmod
|
||||
lmod:
|
||||
core_compilers:
|
||||
- 'clang@12.0.0'
|
||||
@@ -0,0 +1,5 @@
|
||||
enable:
|
||||
- lmod
|
||||
lmod:
|
||||
core_compilers:
|
||||
- 'clang@=12.0.0'
|
||||
BIN
lib/spack/spack/test/data/specfiles/hdf5.v020.json.gz
Normal file
BIN
lib/spack/spack/test/data/specfiles/hdf5.v020.json.gz
Normal file
Binary file not shown.
@@ -31,194 +31,164 @@ class Amdfftw(FftwBase):
|
||||
Example : spack install amdfftw precision=float
|
||||
"""
|
||||
|
||||
_name = 'amdfftw'
|
||||
_name = "amdfftw"
|
||||
homepage = "https://developer.amd.com/amd-aocl/fftw/"
|
||||
url = "https://github.com/amd/amd-fftw/archive/3.0.tar.gz"
|
||||
git = "https://github.com/amd/amd-fftw.git"
|
||||
|
||||
maintainers = ['amd-toolchain-support']
|
||||
maintainers("amd-toolchain-support")
|
||||
|
||||
version('3.1', sha256='3e777f3acef13fa1910db097e818b1d0d03a6a36ef41186247c6ab1ab0afc132')
|
||||
version('3.0.1', sha256='87030c6bbb9c710f0a64f4f306ba6aa91dc4b182bb804c9022b35aef274d1a4c')
|
||||
version('3.0', sha256='a69deaf45478a59a69f77c4f7e9872967f1cfe996592dd12beb6318f18ea0bcd')
|
||||
version('2.2', sha256='de9d777236fb290c335860b458131678f75aa0799c641490c644c843f0e246f8')
|
||||
version("3.1", sha256="3e777f3acef13fa1910db097e818b1d0d03a6a36ef41186247c6ab1ab0afc132")
|
||||
version("3.0.1", sha256="87030c6bbb9c710f0a64f4f306ba6aa91dc4b182bb804c9022b35aef274d1a4c")
|
||||
version("3.0", sha256="a69deaf45478a59a69f77c4f7e9872967f1cfe996592dd12beb6318f18ea0bcd")
|
||||
version("2.2", sha256="de9d777236fb290c335860b458131678f75aa0799c641490c644c843f0e246f8")
|
||||
|
||||
variant('shared', default=True,
|
||||
description='Builds a shared version of the library')
|
||||
variant('openmp', default=True,
|
||||
description='Enable OpenMP support')
|
||||
variant('threads', default=False,
|
||||
description='Enable SMP threads support')
|
||||
variant('debug', default=False,
|
||||
description='Builds a debug version of the library')
|
||||
variant("shared", default=True, description="Builds a shared version of the library")
|
||||
variant("openmp", default=True, description="Enable OpenMP support")
|
||||
variant("threads", default=False, description="Enable SMP threads support")
|
||||
variant("debug", default=False, description="Builds a debug version of the library")
|
||||
variant(
|
||||
'amd-fast-planner',
|
||||
"amd-fast-planner",
|
||||
default=False,
|
||||
description='Option to reduce the planning time without much'
|
||||
'tradeoff in the performance. It is supported for'
|
||||
'Float and double precisions only.')
|
||||
description="Option to reduce the planning time without much"
|
||||
"tradeoff in the performance. It is supported for"
|
||||
"Float and double precisions only.",
|
||||
)
|
||||
variant("amd-top-n-planner", default=False, description="Build with amd-top-n-planner support")
|
||||
variant(
|
||||
'amd-top-n-planner',
|
||||
default=False,
|
||||
description='Build with amd-top-n-planner support')
|
||||
variant(
|
||||
'amd-mpi-vader-limit',
|
||||
default=False,
|
||||
description='Build with amd-mpi-vader-limit support')
|
||||
variant(
|
||||
'static',
|
||||
default=False,
|
||||
description='Build with static suppport')
|
||||
variant(
|
||||
'amd-trans',
|
||||
default=False,
|
||||
description='Build with amd-trans suppport')
|
||||
variant(
|
||||
'amd-app-opt',
|
||||
default=False,
|
||||
description='Build with amd-app-opt suppport')
|
||||
"amd-mpi-vader-limit", default=False, description="Build with amd-mpi-vader-limit support"
|
||||
)
|
||||
variant("static", default=False, description="Build with static suppport")
|
||||
variant("amd-trans", default=False, description="Build with amd-trans suppport")
|
||||
variant("amd-app-opt", default=False, description="Build with amd-app-opt suppport")
|
||||
|
||||
depends_on('texinfo')
|
||||
depends_on("texinfo")
|
||||
|
||||
provides('fftw-api@3', when='@2:')
|
||||
provides("fftw-api@3", when="@2:")
|
||||
|
||||
conflicts(
|
||||
'precision=quad',
|
||||
when='@2.2 %aocc',
|
||||
msg='Quad precision is not supported by AOCC clang version 2.2')
|
||||
"precision=quad",
|
||||
when="@2.2 %aocc",
|
||||
msg="Quad precision is not supported by AOCC clang version 2.2",
|
||||
)
|
||||
conflicts(
|
||||
'+debug',
|
||||
when='@2.2 %aocc',
|
||||
msg='debug mode is not supported by AOCC clang version 2.2')
|
||||
"+debug", when="@2.2 %aocc", msg="debug mode is not supported by AOCC clang version 2.2"
|
||||
)
|
||||
conflicts("%gcc@:7.2", when="@2.2:", msg="GCC version above 7.2 is required for AMDFFTW")
|
||||
conflicts(
|
||||
'%gcc@:7.2',
|
||||
when='@2.2:',
|
||||
msg='GCC version above 7.2 is required for AMDFFTW')
|
||||
"+amd-fast-planner ", when="+mpi", msg="mpi thread is not supported with amd-fast-planner"
|
||||
)
|
||||
conflicts(
|
||||
'+amd-fast-planner ',
|
||||
when='+mpi',
|
||||
msg='mpi thread is not supported with amd-fast-planner')
|
||||
"+amd-fast-planner", when="@2.2", msg="amd-fast-planner is supported from 3.0 onwards"
|
||||
)
|
||||
conflicts(
|
||||
'+amd-fast-planner',
|
||||
when='@2.2',
|
||||
msg='amd-fast-planner is supported from 3.0 onwards')
|
||||
"+amd-fast-planner",
|
||||
when="precision=quad",
|
||||
msg="Quad precision is not supported with amd-fast-planner",
|
||||
)
|
||||
conflicts(
|
||||
'+amd-fast-planner',
|
||||
when='precision=quad',
|
||||
msg='Quad precision is not supported with amd-fast-planner')
|
||||
"+amd-fast-planner",
|
||||
when="precision=long_double",
|
||||
msg="long_double precision is not supported with amd-fast-planner",
|
||||
)
|
||||
conflicts(
|
||||
'+amd-fast-planner',
|
||||
when='precision=long_double',
|
||||
msg='long_double precision is not supported with amd-fast-planner')
|
||||
"+amd-top-n-planner",
|
||||
when="@:3.0.0",
|
||||
msg="amd-top-n-planner is supported from 3.0.1 onwards",
|
||||
)
|
||||
conflicts(
|
||||
'+amd-top-n-planner',
|
||||
when='@:3.0.0',
|
||||
msg='amd-top-n-planner is supported from 3.0.1 onwards')
|
||||
"+amd-top-n-planner",
|
||||
when="precision=long_double",
|
||||
msg="long_double precision is not supported with amd-top-n-planner",
|
||||
)
|
||||
conflicts(
|
||||
'+amd-top-n-planner',
|
||||
when='precision=long_double',
|
||||
msg='long_double precision is not supported with amd-top-n-planner')
|
||||
"+amd-top-n-planner",
|
||||
when="precision=quad",
|
||||
msg="Quad precision is not supported with amd-top-n-planner",
|
||||
)
|
||||
conflicts(
|
||||
'+amd-top-n-planner',
|
||||
when='precision=quad',
|
||||
msg='Quad precision is not supported with amd-top-n-planner')
|
||||
"+amd-top-n-planner",
|
||||
when="+amd-fast-planner",
|
||||
msg="amd-top-n-planner cannot be used with amd-fast-planner",
|
||||
)
|
||||
conflicts(
|
||||
'+amd-top-n-planner',
|
||||
when='+amd-fast-planner',
|
||||
msg='amd-top-n-planner cannot be used with amd-fast-planner')
|
||||
"+amd-top-n-planner", when="+threads", msg="amd-top-n-planner works only for single thread"
|
||||
)
|
||||
conflicts(
|
||||
'+amd-top-n-planner',
|
||||
when='+threads',
|
||||
msg='amd-top-n-planner works only for single thread')
|
||||
"+amd-top-n-planner", when="+mpi", msg="mpi thread is not supported with amd-top-n-planner"
|
||||
)
|
||||
conflicts(
|
||||
'+amd-top-n-planner',
|
||||
when='+mpi',
|
||||
msg='mpi thread is not supported with amd-top-n-planner')
|
||||
"+amd-top-n-planner",
|
||||
when="+openmp",
|
||||
msg="openmp thread is not supported with amd-top-n-planner",
|
||||
)
|
||||
conflicts(
|
||||
'+amd-top-n-planner',
|
||||
when='+openmp',
|
||||
msg='openmp thread is not supported with amd-top-n-planner')
|
||||
"+amd-mpi-vader-limit",
|
||||
when="@:3.0.0",
|
||||
msg="amd-mpi-vader-limit is supported from 3.0.1 onwards",
|
||||
)
|
||||
conflicts(
|
||||
'+amd-mpi-vader-limit',
|
||||
when='@:3.0.0',
|
||||
msg='amd-mpi-vader-limit is supported from 3.0.1 onwards')
|
||||
"+amd-mpi-vader-limit",
|
||||
when="precision=quad",
|
||||
msg="Quad precision is not supported with amd-mpi-vader-limit",
|
||||
)
|
||||
conflicts("+amd-trans", when="+threads", msg="amd-trans works only for single thread")
|
||||
conflicts("+amd-trans", when="+mpi", msg="mpi thread is not supported with amd-trans")
|
||||
conflicts("+amd-trans", when="+openmp", msg="openmp thread is not supported with amd-trans")
|
||||
conflicts(
|
||||
'+amd-mpi-vader-limit',
|
||||
when='precision=quad',
|
||||
msg='Quad precision is not supported with amd-mpi-vader-limit')
|
||||
"+amd-trans",
|
||||
when="precision=long_double",
|
||||
msg="long_double precision is not supported with amd-trans",
|
||||
)
|
||||
conflicts(
|
||||
'+amd-trans',
|
||||
when='+threads',
|
||||
msg='amd-trans works only for single thread')
|
||||
"+amd-trans", when="precision=quad", msg="Quad precision is not supported with amd-trans"
|
||||
)
|
||||
conflicts("+amd-app-opt", when="@:3.0.1", msg="amd-app-opt is supported from 3.1 onwards")
|
||||
conflicts("+amd-app-opt", when="+mpi", msg="mpi thread is not supported with amd-app-opt")
|
||||
conflicts(
|
||||
'+amd-trans',
|
||||
when='+mpi',
|
||||
msg='mpi thread is not supported with amd-trans')
|
||||
"+amd-app-opt",
|
||||
when="precision=long_double",
|
||||
msg="long_double precision is not supported with amd-app-opt",
|
||||
)
|
||||
conflicts(
|
||||
'+amd-trans',
|
||||
when='+openmp',
|
||||
msg='openmp thread is not supported with amd-trans')
|
||||
conflicts(
|
||||
'+amd-trans',
|
||||
when='precision=long_double',
|
||||
msg='long_double precision is not supported with amd-trans')
|
||||
conflicts(
|
||||
'+amd-trans',
|
||||
when='precision=quad',
|
||||
msg='Quad precision is not supported with amd-trans')
|
||||
conflicts(
|
||||
'+amd-app-opt',
|
||||
when='@:3.0.1',
|
||||
msg='amd-app-opt is supported from 3.1 onwards')
|
||||
conflicts(
|
||||
'+amd-app-opt',
|
||||
when='+mpi',
|
||||
msg='mpi thread is not supported with amd-app-opt')
|
||||
conflicts(
|
||||
'+amd-app-opt',
|
||||
when='precision=long_double',
|
||||
msg='long_double precision is not supported with amd-app-opt')
|
||||
conflicts(
|
||||
'+amd-app-opt',
|
||||
when='precision=quad',
|
||||
msg='Quad precision is not supported with amd-app-opt')
|
||||
"+amd-app-opt",
|
||||
when="precision=quad",
|
||||
msg="Quad precision is not supported with amd-app-opt",
|
||||
)
|
||||
|
||||
def configure(self, spec, prefix):
|
||||
"""Configure function"""
|
||||
# Base options
|
||||
options = [
|
||||
'--prefix={0}'.format(prefix),
|
||||
'--enable-amd-opt'
|
||||
]
|
||||
options = ["--prefix={0}".format(prefix), "--enable-amd-opt"]
|
||||
|
||||
# Check if compiler is AOCC
|
||||
if '%aocc' in spec:
|
||||
options.append('CC={0}'.format(os.path.basename(spack_cc)))
|
||||
options.append('FC={0}'.format(os.path.basename(spack_fc)))
|
||||
options.append('F77={0}'.format(os.path.basename(spack_fc)))
|
||||
if "%aocc" in spec:
|
||||
options.append("CC={0}".format(os.path.basename(spack_cc)))
|
||||
options.append("FC={0}".format(os.path.basename(spack_fc)))
|
||||
options.append("F77={0}".format(os.path.basename(spack_fc)))
|
||||
|
||||
if '+debug' in spec:
|
||||
options.append('--enable-debug')
|
||||
if "+debug" in spec:
|
||||
options.append("--enable-debug")
|
||||
|
||||
if '+mpi' in spec:
|
||||
options.append('--enable-mpi')
|
||||
options.append('--enable-amd-mpifft')
|
||||
if "+mpi" in spec:
|
||||
options.append("--enable-mpi")
|
||||
options.append("--enable-amd-mpifft")
|
||||
else:
|
||||
options.append('--disable-mpi')
|
||||
options.append('--disable-amd-mpifft')
|
||||
options.append("--disable-mpi")
|
||||
options.append("--disable-amd-mpifft")
|
||||
|
||||
options.extend(self.enable_or_disable('shared'))
|
||||
options.extend(self.enable_or_disable('openmp'))
|
||||
options.extend(self.enable_or_disable('threads'))
|
||||
options.extend(self.enable_or_disable('amd-fast-planner'))
|
||||
options.extend(self.enable_or_disable('amd-top-n-planner'))
|
||||
options.extend(self.enable_or_disable('amd-mpi-vader-limit'))
|
||||
options.extend(self.enable_or_disable('static'))
|
||||
options.extend(self.enable_or_disable('amd-trans'))
|
||||
options.extend(self.enable_or_disable('amd-app-opt'))
|
||||
options.extend(self.enable_or_disable("shared"))
|
||||
options.extend(self.enable_or_disable("openmp"))
|
||||
options.extend(self.enable_or_disable("threads"))
|
||||
options.extend(self.enable_or_disable("amd-fast-planner"))
|
||||
options.extend(self.enable_or_disable("amd-top-n-planner"))
|
||||
options.extend(self.enable_or_disable("amd-mpi-vader-limit"))
|
||||
options.extend(self.enable_or_disable("static"))
|
||||
options.extend(self.enable_or_disable("amd-trans"))
|
||||
options.extend(self.enable_or_disable("amd-app-opt"))
|
||||
|
||||
if not self.compiler.f77 or not self.compiler.fc:
|
||||
options.append('--disable-fortran')
|
||||
options.append("--disable-fortran")
|
||||
|
||||
# Cross compilation is supported in amd-fftw by making use of target
|
||||
# variable to set AMD_ARCH configure option.
|
||||
@@ -226,17 +196,16 @@ class Amdfftw(FftwBase):
|
||||
# use target variable to set appropriate -march option in AMD_ARCH.
|
||||
arch = spec.architecture
|
||||
options.append(
|
||||
'AMD_ARCH={0}'.format(
|
||||
arch.target.optimization_flags(
|
||||
spec.compiler).split('=')[-1]))
|
||||
"AMD_ARCH={0}".format(arch.target.optimization_flags(spec.compiler).split("=")[-1])
|
||||
)
|
||||
|
||||
# Specific SIMD support.
|
||||
# float and double precisions are supported
|
||||
simd_features = ['sse2', 'avx', 'avx2']
|
||||
simd_features = ["sse2", "avx", "avx2"]
|
||||
|
||||
simd_options = []
|
||||
for feature in simd_features:
|
||||
msg = '--enable-{0}' if feature in spec.target else '--disable-{0}'
|
||||
msg = "--enable-{0}" if feature in spec.target else "--disable-{0}"
|
||||
simd_options.append(msg.format(feature))
|
||||
|
||||
# When enabling configure option "--enable-amd-opt", do not use the
|
||||
@@ -246,20 +215,19 @@ class Amdfftw(FftwBase):
|
||||
# Double is the default precision, for all the others we need
|
||||
# to enable the corresponding option.
|
||||
enable_precision = {
|
||||
'float': ['--enable-float'],
|
||||
'double': None,
|
||||
'long_double': ['--enable-long-double'],
|
||||
'quad': ['--enable-quad-precision']
|
||||
"float": ["--enable-float"],
|
||||
"double": None,
|
||||
"long_double": ["--enable-long-double"],
|
||||
"quad": ["--enable-quad-precision"],
|
||||
}
|
||||
|
||||
# Different precisions must be configured and compiled one at a time
|
||||
configure = Executable('../configure')
|
||||
configure = Executable("../configure")
|
||||
for precision in self.selected_precisions:
|
||||
|
||||
opts = (enable_precision[precision] or []) + options[:]
|
||||
|
||||
# SIMD optimizations are available only for float and double
|
||||
if precision in ('float', 'double'):
|
||||
if precision in ("float", "double"):
|
||||
opts += simd_options
|
||||
|
||||
with working_dir(precision, create=True):
|
||||
|
||||
@@ -16,21 +16,21 @@ from spack.package import *
|
||||
|
||||
class Llvm(CMakePackage, CudaPackage):
|
||||
"""The LLVM Project is a collection of modular and reusable compiler and
|
||||
toolchain technologies. Despite its name, LLVM has little to do
|
||||
with traditional virtual machines, though it does provide helpful
|
||||
libraries that can be used to build them. The name "LLVM" itself
|
||||
is not an acronym; it is the full name of the project.
|
||||
toolchain technologies. Despite its name, LLVM has little to do
|
||||
with traditional virtual machines, though it does provide helpful
|
||||
libraries that can be used to build them. The name "LLVM" itself
|
||||
is not an acronym; it is the full name of the project.
|
||||
"""
|
||||
|
||||
homepage = "https://llvm.org/"
|
||||
url = "https://github.com/llvm/llvm-project/archive/llvmorg-7.1.0.tar.gz"
|
||||
list_url = "https://releases.llvm.org/download.html"
|
||||
git = "https://github.com/llvm/llvm-project"
|
||||
maintainers = ['trws', 'haampie']
|
||||
maintainers("trws", "haampie")
|
||||
|
||||
tags = ['e4s']
|
||||
tags = ["e4s"]
|
||||
|
||||
generator = 'Ninja'
|
||||
generator = "Ninja"
|
||||
|
||||
family = "compiler" # Used by lmod
|
||||
|
||||
@@ -80,13 +80,12 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
# to save space, build with `build_type=Release`.
|
||||
|
||||
variant(
|
||||
"clang",
|
||||
default=True,
|
||||
description="Build the LLVM C/C++/Objective-C compiler frontend",
|
||||
"clang", default=True, description="Build the LLVM C/C++/Objective-C compiler frontend"
|
||||
)
|
||||
variant(
|
||||
"flang",
|
||||
default=False, when='@11: +clang',
|
||||
default=False,
|
||||
when="@11: +clang",
|
||||
description="Build the LLVM Fortran compiler frontend "
|
||||
"(experimental - parser only, needs GCC)",
|
||||
)
|
||||
@@ -95,27 +94,23 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
default=False,
|
||||
description="Include debugging code in OpenMP runtime libraries",
|
||||
)
|
||||
variant("lldb", default=True, when='+clang', description="Build the LLVM debugger")
|
||||
variant("lldb", default=True, when="+clang", description="Build the LLVM debugger")
|
||||
variant("lld", default=True, description="Build the LLVM linker")
|
||||
variant("mlir", default=False, when='@10:', description="Build with MLIR support")
|
||||
variant("mlir", default=False, when="@10:", description="Build with MLIR support")
|
||||
variant(
|
||||
"internal_unwind",
|
||||
default=True, when='+clang',
|
||||
description="Build the libcxxabi libunwind",
|
||||
"internal_unwind", default=True, when="+clang", description="Build the libcxxabi libunwind"
|
||||
)
|
||||
variant(
|
||||
"polly",
|
||||
default=True,
|
||||
description="Build the LLVM polyhedral optimization plugin, "
|
||||
"only builds for 3.7.0+",
|
||||
description="Build the LLVM polyhedral optimization plugin, " "only builds for 3.7.0+",
|
||||
)
|
||||
variant(
|
||||
"libcxx",
|
||||
default=True, when='+clang',
|
||||
description="Build the LLVM C++ standard library",
|
||||
"libcxx", default=True, when="+clang", description="Build the LLVM C++ standard library"
|
||||
)
|
||||
variant(
|
||||
"compiler-rt", when='+clang',
|
||||
"compiler-rt",
|
||||
when="+clang",
|
||||
default=True,
|
||||
description="Build LLVM compiler runtime, including sanitizers",
|
||||
)
|
||||
@@ -124,11 +119,7 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
default=(sys.platform != "darwin"),
|
||||
description="Add support for LTO with the gold linker plugin",
|
||||
)
|
||||
variant(
|
||||
"split_dwarf",
|
||||
default=False,
|
||||
description="Build with split dwarf information",
|
||||
)
|
||||
variant("split_dwarf", default=False, description="Build with split dwarf information")
|
||||
variant(
|
||||
"llvm_dylib",
|
||||
default=True,
|
||||
@@ -136,18 +127,40 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
)
|
||||
variant(
|
||||
"link_llvm_dylib",
|
||||
default=False, when='+llvm_dylib',
|
||||
default=False,
|
||||
when="+llvm_dylib",
|
||||
description="Link LLVM tools against the LLVM shared library",
|
||||
)
|
||||
variant(
|
||||
"targets",
|
||||
default="none",
|
||||
description=("What targets to build. Spack's target family is always added "
|
||||
"(e.g. X86 is automatically enabled when targeting znver2)."),
|
||||
values=("all", "none", "aarch64", "amdgpu", "arm", "avr", "bpf", "cppbackend",
|
||||
"hexagon", "lanai", "mips", "msp430", "nvptx", "powerpc", "riscv",
|
||||
"sparc", "systemz", "webassembly", "x86", "xcore"),
|
||||
multi=True
|
||||
description=(
|
||||
"What targets to build. Spack's target family is always added "
|
||||
"(e.g. X86 is automatically enabled when targeting znver2)."
|
||||
),
|
||||
values=(
|
||||
"all",
|
||||
"none",
|
||||
"aarch64",
|
||||
"amdgpu",
|
||||
"arm",
|
||||
"avr",
|
||||
"bpf",
|
||||
"cppbackend",
|
||||
"hexagon",
|
||||
"lanai",
|
||||
"mips",
|
||||
"msp430",
|
||||
"nvptx",
|
||||
"powerpc",
|
||||
"riscv",
|
||||
"sparc",
|
||||
"systemz",
|
||||
"webassembly",
|
||||
"x86",
|
||||
"xcore",
|
||||
),
|
||||
multi=True,
|
||||
)
|
||||
variant(
|
||||
"build_type",
|
||||
@@ -157,51 +170,52 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
)
|
||||
variant(
|
||||
"omp_tsan",
|
||||
default=False, when='@6:',
|
||||
default=False,
|
||||
when="@6:",
|
||||
description="Build with OpenMP capable thread sanitizer",
|
||||
)
|
||||
variant(
|
||||
"omp_as_runtime",
|
||||
default=True,
|
||||
when='+clang @12:',
|
||||
when="+clang @12:",
|
||||
description="Build OpenMP runtime via ENABLE_RUNTIME by just-built Clang",
|
||||
)
|
||||
variant('code_signing', default=False,
|
||||
when='+lldb platform=darwin',
|
||||
description="Enable code-signing on macOS")
|
||||
variant("python", default=False, description="Install python bindings")
|
||||
variant('version_suffix', default='none', description="Add a symbol suffix")
|
||||
variant(
|
||||
'shlib_symbol_version',
|
||||
default='none',
|
||||
"code_signing",
|
||||
default=False,
|
||||
when="+lldb platform=darwin",
|
||||
description="Enable code-signing on macOS",
|
||||
)
|
||||
variant("python", default=False, description="Install python bindings")
|
||||
variant("version_suffix", default="none", description="Add a symbol suffix")
|
||||
variant(
|
||||
"shlib_symbol_version",
|
||||
default="none",
|
||||
description="Add shared library symbol version",
|
||||
when='@13:'
|
||||
when="@13:",
|
||||
)
|
||||
variant(
|
||||
'z3',
|
||||
default=False,
|
||||
when='+clang @8:',
|
||||
description='Use Z3 for the clang static analyzer'
|
||||
"z3", default=False, when="+clang @8:", description="Use Z3 for the clang static analyzer"
|
||||
)
|
||||
|
||||
provides('libllvm@14', when='@14.0.0:14')
|
||||
provides('libllvm@13', when='@13.0.0:13')
|
||||
provides('libllvm@12', when='@12.0.0:12')
|
||||
provides('libllvm@11', when='@11.0.0:11')
|
||||
provides('libllvm@10', when='@10.0.0:10')
|
||||
provides('libllvm@9', when='@9.0.0:9')
|
||||
provides('libllvm@8', when='@8.0.0:8')
|
||||
provides('libllvm@7', when='@7.0.0:7')
|
||||
provides('libllvm@6', when='@6.0.0:6')
|
||||
provides('libllvm@5', when='@5.0.0:5')
|
||||
provides('libllvm@4', when='@4.0.0:4')
|
||||
provides('libllvm@3', when='@3.0.0:3')
|
||||
provides("libllvm@14", when="@14.0.0:14")
|
||||
provides("libllvm@13", when="@13.0.0:13")
|
||||
provides("libllvm@12", when="@12.0.0:12")
|
||||
provides("libllvm@11", when="@11.0.0:11")
|
||||
provides("libllvm@10", when="@10.0.0:10")
|
||||
provides("libllvm@9", when="@9.0.0:9")
|
||||
provides("libllvm@8", when="@8.0.0:8")
|
||||
provides("libllvm@7", when="@7.0.0:7")
|
||||
provides("libllvm@6", when="@6.0.0:6")
|
||||
provides("libllvm@5", when="@5.0.0:5")
|
||||
provides("libllvm@4", when="@4.0.0:4")
|
||||
provides("libllvm@3", when="@3.0.0:3")
|
||||
|
||||
extends("python", when="+python")
|
||||
|
||||
# Build dependency
|
||||
depends_on("cmake@3.4.3:", type="build")
|
||||
depends_on('cmake@3.13.4:', type='build', when='@12:')
|
||||
depends_on("cmake@3.13.4:", type="build", when="@12:")
|
||||
depends_on("ninja", type="build")
|
||||
depends_on("python@2.7:2.8", when="@:4 ~python", type="build")
|
||||
depends_on("python", when="@5: ~python", type="build")
|
||||
@@ -242,7 +256,7 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
# clang/lib: a lambda parameter cannot shadow an explicitly captured entity
|
||||
conflicts("%clang@8:", when="@:4")
|
||||
# Internal compiler error on gcc 8.4 on aarch64 https://bugzilla.redhat.com/show_bug.cgi?id=1958295
|
||||
conflicts('%gcc@8.4:8.4.9', when='@12: target=aarch64:')
|
||||
conflicts("%gcc@8.4:8.4.9", when="@12: target=aarch64:")
|
||||
|
||||
# When these versions are concretized, but not explicitly with +libcxx, these
|
||||
# conflicts will enable clingo to set ~libcxx, making the build successful:
|
||||
@@ -252,17 +266,17 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
# GCC 11 - latest stable release per GCC release page
|
||||
# Clang: 11, 12 - latest two stable releases per LLVM release page
|
||||
# AppleClang 12 - latest stable release per Xcode release page
|
||||
conflicts("%gcc@:10", when="@13:+libcxx")
|
||||
conflicts("%clang@:10", when="@13:+libcxx")
|
||||
conflicts("%gcc@:10", when="@13:+libcxx")
|
||||
conflicts("%clang@:10", when="@13:+libcxx")
|
||||
conflicts("%apple-clang@:11", when="@13:+libcxx")
|
||||
|
||||
# libcxx-4 and compiler-rt-4 fail to build with "newer" clang and gcc versions:
|
||||
conflicts('%gcc@7:', when='@:4+libcxx')
|
||||
conflicts('%clang@6:', when='@:4+libcxx')
|
||||
conflicts('%apple-clang@6:', when='@:4+libcxx')
|
||||
conflicts('%gcc@7:', when='@:4+compiler-rt')
|
||||
conflicts('%clang@6:', when='@:4+compiler-rt')
|
||||
conflicts('%apple-clang@6:', when='@:4+compiler-rt')
|
||||
conflicts("%gcc@7:", when="@:4+libcxx")
|
||||
conflicts("%clang@6:", when="@:4+libcxx")
|
||||
conflicts("%apple-clang@6:", when="@:4+libcxx")
|
||||
conflicts("%gcc@7:", when="@:4+compiler-rt")
|
||||
conflicts("%clang@6:", when="@:4+compiler-rt")
|
||||
conflicts("%apple-clang@6:", when="@:4+compiler-rt")
|
||||
|
||||
# cuda_arch value must be specified
|
||||
conflicts("cuda_arch=none", when="+cuda", msg="A value for cuda_arch must be specified.")
|
||||
@@ -270,27 +284,27 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
# LLVM bug https://bugs.llvm.org/show_bug.cgi?id=48234
|
||||
# CMake bug: https://gitlab.kitware.com/cmake/cmake/-/issues/21469
|
||||
# Fixed in upstream versions of both
|
||||
conflicts('^cmake@3.19.0', when='@6:11.0.0')
|
||||
conflicts("^cmake@3.19.0", when="@6:11.0.0")
|
||||
|
||||
# Github issue #4986
|
||||
patch("llvm_gcc7.patch", when="@4.0.0:4.0.1+lldb %gcc@7.0:")
|
||||
|
||||
# sys/ustat.h has been removed in favour of statfs from glibc-2.28. Use fixed sizes:
|
||||
patch('llvm5-sanitizer-ustat.patch', when="@4:6.0.0+compiler-rt")
|
||||
patch("llvm5-sanitizer-ustat.patch", when="@4:6.0.0+compiler-rt")
|
||||
|
||||
# Fix lld templates: https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=230463
|
||||
patch('llvm4-lld-ELF-Symbols.patch', when="@4+lld%clang@6:")
|
||||
patch('llvm5-lld-ELF-Symbols.patch', when="@5+lld%clang@7:")
|
||||
patch("llvm4-lld-ELF-Symbols.patch", when="@4+lld%clang@6:")
|
||||
patch("llvm5-lld-ELF-Symbols.patch", when="@5+lld%clang@7:")
|
||||
|
||||
# Fix missing std:size_t in 'llvm@4:5' when built with '%clang@7:'
|
||||
patch('xray_buffer_queue-cstddef.patch', when="@4:5+compiler-rt%clang@7:")
|
||||
patch("xray_buffer_queue-cstddef.patch", when="@4:5+compiler-rt%clang@7:")
|
||||
|
||||
# https://github.com/llvm/llvm-project/commit/947f9692440836dcb8d88b74b69dd379d85974ce
|
||||
patch('sanitizer-ipc_perm_mode.patch', when="@5:7+compiler-rt%clang@11:")
|
||||
patch('sanitizer-ipc_perm_mode.patch', when="@5:9+compiler-rt%gcc@9:")
|
||||
patch("sanitizer-ipc_perm_mode.patch", when="@5:7+compiler-rt%clang@11:")
|
||||
patch("sanitizer-ipc_perm_mode.patch", when="@5:9+compiler-rt%gcc@9:")
|
||||
|
||||
# github.com/spack/spack/issues/24270: MicrosoftDemangle for %gcc@10: and %clang@13:
|
||||
patch('missing-includes.patch', when='@8')
|
||||
patch("missing-includes.patch", when="@8")
|
||||
|
||||
# Backport from llvm master + additional fix
|
||||
# see https://bugs.llvm.org/show_bug.cgi?id=39696
|
||||
@@ -315,33 +329,33 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
patch("llvm_python_path.patch", when="@:11")
|
||||
|
||||
# Workaround for issue https://github.com/spack/spack/issues/18197
|
||||
patch('llvm7_intel.patch', when='@7 %intel@18.0.2,19.0.0:19.1.99')
|
||||
patch("llvm7_intel.patch", when="@7 %intel@18.0.2,19.0.0:19.1.99")
|
||||
|
||||
# Remove cyclades support to build against newer kernel headers
|
||||
# https://reviews.llvm.org/D102059
|
||||
patch('no_cyclades.patch', when='@10:12.0.0')
|
||||
patch('no_cyclades9.patch', when='@6:9')
|
||||
patch("no_cyclades.patch", when="@10:12.0.0")
|
||||
patch("no_cyclades9.patch", when="@6:9")
|
||||
|
||||
patch('llvm-gcc11.patch', when='@9:11%gcc@11:')
|
||||
patch("llvm-gcc11.patch", when="@9:11%gcc@11:")
|
||||
|
||||
# add -lpthread to build OpenMP libraries with Fujitsu compiler
|
||||
patch('llvm12-thread.patch', when='@12 %fj')
|
||||
patch('llvm13-thread.patch', when='@13 %fj')
|
||||
patch("llvm12-thread.patch", when="@12 %fj")
|
||||
patch("llvm13-thread.patch", when="@13 %fj")
|
||||
|
||||
# avoid build failed with Fujitsu compiler
|
||||
patch('llvm13-fujitsu.patch', when='@13 %fj')
|
||||
patch("llvm13-fujitsu.patch", when="@13 %fj")
|
||||
|
||||
# patch for missing hwloc.h include for libompd
|
||||
patch('llvm14-hwloc-ompd.patch', when='@14')
|
||||
patch("llvm14-hwloc-ompd.patch", when="@14")
|
||||
|
||||
# make libflags a list in openmp subproject when ~omp_as_runtime
|
||||
patch('libomp-libflags-as-list.patch', when='@3.7:')
|
||||
patch("libomp-libflags-as-list.patch", when="@3.7:")
|
||||
|
||||
# The functions and attributes below implement external package
|
||||
# detection for LLVM. See:
|
||||
#
|
||||
# https://spack.readthedocs.io/en/latest/packaging_guide.html#making-a-package-discoverable-with-spack-external-find
|
||||
executables = ['clang', 'flang', 'ld.lld', 'lldb']
|
||||
executables = ["clang", "flang", "ld.lld", "lldb"]
|
||||
|
||||
@classmethod
|
||||
def filter_detected_exes(cls, prefix, exes_in_prefix):
|
||||
@@ -351,7 +365,7 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
# on some port and would hang Spack during detection.
|
||||
# clang-cl and clang-cpp are dev tools that we don't
|
||||
# need to test
|
||||
if any(x in exe for x in ('vscode', 'cpp', '-cl', '-gpu')):
|
||||
if any(x in exe for x in ("vscode", "cpp", "-cl", "-gpu")):
|
||||
continue
|
||||
result.append(exe)
|
||||
return result
|
||||
@@ -360,20 +374,20 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
def determine_version(cls, exe):
|
||||
version_regex = re.compile(
|
||||
# Normal clang compiler versions are left as-is
|
||||
r'clang version ([^ )\n]+)-svn[~.\w\d-]*|'
|
||||
r"clang version ([^ )\n]+)-svn[~.\w\d-]*|"
|
||||
# Don't include hyphenated patch numbers in the version
|
||||
# (see https://github.com/spack/spack/pull/14365 for details)
|
||||
r'clang version ([^ )\n]+?)-[~.\w\d-]*|'
|
||||
r'clang version ([^ )\n]+)|'
|
||||
r"clang version ([^ )\n]+?)-[~.\w\d-]*|"
|
||||
r"clang version ([^ )\n]+)|"
|
||||
# LLDB
|
||||
r'lldb version ([^ )\n]+)|'
|
||||
r"lldb version ([^ )\n]+)|"
|
||||
# LLD
|
||||
r'LLD ([^ )\n]+) \(compatible with GNU linkers\)'
|
||||
r"LLD ([^ )\n]+) \(compatible with GNU linkers\)"
|
||||
)
|
||||
try:
|
||||
compiler = Executable(exe)
|
||||
output = compiler('--version', output=str, error=str)
|
||||
if 'Apple' in output:
|
||||
output = compiler("--version", output=str, error=str)
|
||||
if "Apple" in output:
|
||||
return None
|
||||
match = version_regex.search(output)
|
||||
if match:
|
||||
@@ -387,38 +401,39 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
|
||||
@classmethod
|
||||
def determine_variants(cls, exes, version_str):
|
||||
variants, compilers = ['+clang'], {}
|
||||
variants, compilers = ["+clang"], {}
|
||||
lld_found, lldb_found = False, False
|
||||
for exe in exes:
|
||||
if 'clang++' in exe:
|
||||
compilers['cxx'] = exe
|
||||
elif 'clang' in exe:
|
||||
compilers['c'] = exe
|
||||
elif 'flang' in exe:
|
||||
variants.append('+flang')
|
||||
compilers['fc'] = exe
|
||||
compilers['f77'] = exe
|
||||
elif 'ld.lld' in exe:
|
||||
if "clang++" in exe:
|
||||
compilers["cxx"] = exe
|
||||
elif "clang" in exe:
|
||||
compilers["c"] = exe
|
||||
elif "flang" in exe:
|
||||
variants.append("+flang")
|
||||
compilers["fc"] = exe
|
||||
compilers["f77"] = exe
|
||||
elif "ld.lld" in exe:
|
||||
lld_found = True
|
||||
compilers['ld'] = exe
|
||||
elif 'lldb' in exe:
|
||||
compilers["ld"] = exe
|
||||
elif "lldb" in exe:
|
||||
lldb_found = True
|
||||
compilers['lldb'] = exe
|
||||
compilers["lldb"] = exe
|
||||
|
||||
variants.append('+lld' if lld_found else '~lld')
|
||||
variants.append('+lldb' if lldb_found else '~lldb')
|
||||
variants.append("+lld" if lld_found else "~lld")
|
||||
variants.append("+lldb" if lldb_found else "~lldb")
|
||||
|
||||
return ''.join(variants), {'compilers': compilers}
|
||||
return "".join(variants), {"compilers": compilers}
|
||||
|
||||
@classmethod
|
||||
def validate_detected_spec(cls, spec, extra_attributes):
|
||||
# For LLVM 'compilers' is a mandatory attribute
|
||||
msg = ('the extra attribute "compilers" must be set for '
|
||||
'the detected spec "{0}"'.format(spec))
|
||||
assert 'compilers' in extra_attributes, msg
|
||||
compilers = extra_attributes['compilers']
|
||||
for key in ('c', 'cxx'):
|
||||
msg = '{0} compiler not found for {1}'
|
||||
msg = 'the extra attribute "compilers" must be set for ' 'the detected spec "{0}"'.format(
|
||||
spec
|
||||
)
|
||||
assert "compilers" in extra_attributes, msg
|
||||
compilers = extra_attributes["compilers"]
|
||||
for key in ("c", "cxx"):
|
||||
msg = "{0} compiler not found for {1}"
|
||||
assert key in compilers, msg.format(key, spec)
|
||||
|
||||
@property
|
||||
@@ -426,10 +441,10 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
msg = "cannot retrieve C compiler [spec is not concrete]"
|
||||
assert self.spec.concrete, msg
|
||||
if self.spec.external:
|
||||
return self.spec.extra_attributes['compilers'].get('c', None)
|
||||
return self.spec.extra_attributes["compilers"].get("c", None)
|
||||
result = None
|
||||
if '+clang' in self.spec:
|
||||
result = os.path.join(self.spec.prefix.bin, 'clang')
|
||||
if "+clang" in self.spec:
|
||||
result = os.path.join(self.spec.prefix.bin, "clang")
|
||||
return result
|
||||
|
||||
@property
|
||||
@@ -437,10 +452,10 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
msg = "cannot retrieve C++ compiler [spec is not concrete]"
|
||||
assert self.spec.concrete, msg
|
||||
if self.spec.external:
|
||||
return self.spec.extra_attributes['compilers'].get('cxx', None)
|
||||
return self.spec.extra_attributes["compilers"].get("cxx", None)
|
||||
result = None
|
||||
if '+clang' in self.spec:
|
||||
result = os.path.join(self.spec.prefix.bin, 'clang++')
|
||||
if "+clang" in self.spec:
|
||||
result = os.path.join(self.spec.prefix.bin, "clang++")
|
||||
return result
|
||||
|
||||
@property
|
||||
@@ -448,10 +463,10 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
msg = "cannot retrieve Fortran compiler [spec is not concrete]"
|
||||
assert self.spec.concrete, msg
|
||||
if self.spec.external:
|
||||
return self.spec.extra_attributes['compilers'].get('fc', None)
|
||||
return self.spec.extra_attributes["compilers"].get("fc", None)
|
||||
result = None
|
||||
if '+flang' in self.spec:
|
||||
result = os.path.join(self.spec.prefix.bin, 'flang')
|
||||
if "+flang" in self.spec:
|
||||
result = os.path.join(self.spec.prefix.bin, "flang")
|
||||
return result
|
||||
|
||||
@property
|
||||
@@ -459,27 +474,25 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
msg = "cannot retrieve Fortran 77 compiler [spec is not concrete]"
|
||||
assert self.spec.concrete, msg
|
||||
if self.spec.external:
|
||||
return self.spec.extra_attributes['compilers'].get('f77', None)
|
||||
return self.spec.extra_attributes["compilers"].get("f77", None)
|
||||
result = None
|
||||
if '+flang' in self.spec:
|
||||
result = os.path.join(self.spec.prefix.bin, 'flang')
|
||||
if "+flang" in self.spec:
|
||||
result = os.path.join(self.spec.prefix.bin, "flang")
|
||||
return result
|
||||
|
||||
@property
|
||||
def libs(self):
|
||||
return LibraryList(self.llvm_config("--libfiles", "all",
|
||||
result="list"))
|
||||
return LibraryList(self.llvm_config("--libfiles", "all", result="list"))
|
||||
|
||||
@run_before('cmake')
|
||||
@run_before("cmake")
|
||||
def codesign_check(self):
|
||||
if self.spec.satisfies("+code_signing"):
|
||||
codesign = which('codesign')
|
||||
mkdir('tmp')
|
||||
llvm_check_file = join_path('tmp', 'llvm_check')
|
||||
copy('/usr/bin/false', llvm_check_file)
|
||||
codesign = which("codesign")
|
||||
mkdir("tmp")
|
||||
llvm_check_file = join_path("tmp", "llvm_check")
|
||||
copy("/usr/bin/false", llvm_check_file)
|
||||
try:
|
||||
codesign('-f', '-s', 'lldb_codesign', '--dryrun',
|
||||
llvm_check_file)
|
||||
codesign("-f", "-s", "lldb_codesign", "--dryrun", llvm_check_file)
|
||||
|
||||
except ProcessError:
|
||||
# Newer LLVM versions have a simple script that sets up
|
||||
@@ -489,32 +502,32 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
setup()
|
||||
except Exception:
|
||||
raise RuntimeError(
|
||||
'spack was unable to either find or set up'
|
||||
'code-signing on your system. Please refer to'
|
||||
'https://lldb.llvm.org/resources/build.html#'
|
||||
'code-signing-on-macos for details on how to'
|
||||
'create this identity.'
|
||||
"spack was unable to either find or set up"
|
||||
"code-signing on your system. Please refer to"
|
||||
"https://lldb.llvm.org/resources/build.html#"
|
||||
"code-signing-on-macos for details on how to"
|
||||
"create this identity."
|
||||
)
|
||||
|
||||
def flag_handler(self, name, flags):
|
||||
if name == 'cxxflags':
|
||||
if name == "cxxflags":
|
||||
flags.append(self.compiler.cxx11_flag)
|
||||
return(None, flags, None)
|
||||
elif name == 'ldflags' and self.spec.satisfies('%intel'):
|
||||
flags.append('-shared-intel')
|
||||
return(None, flags, None)
|
||||
return(flags, None, None)
|
||||
return (None, flags, None)
|
||||
elif name == "ldflags" and self.spec.satisfies("%intel"):
|
||||
flags.append("-shared-intel")
|
||||
return (None, flags, None)
|
||||
return (flags, None, None)
|
||||
|
||||
def setup_build_environment(self, env):
|
||||
"""When using %clang, add only its ld.lld-$ver and/or ld.lld to our PATH"""
|
||||
if self.compiler.name in ['clang', 'apple-clang']:
|
||||
for lld in 'ld.lld-{0}'.format(self.compiler.version.version[0]), 'ld.lld':
|
||||
if self.compiler.name in ["clang", "apple-clang"]:
|
||||
for lld in "ld.lld-{0}".format(self.compiler.version.version[0]), "ld.lld":
|
||||
bin = os.path.join(os.path.dirname(self.compiler.cc), lld)
|
||||
sym = os.path.join(self.stage.path, 'ld.lld')
|
||||
sym = os.path.join(self.stage.path, "ld.lld")
|
||||
if os.path.exists(bin) and not os.path.exists(sym):
|
||||
mkdirp(self.stage.path)
|
||||
os.symlink(bin, sym)
|
||||
env.prepend_path('PATH', self.stage.path)
|
||||
env.prepend_path("PATH", self.stage.path)
|
||||
|
||||
def setup_run_environment(self, env):
|
||||
if "+clang" in self.spec:
|
||||
@@ -531,7 +544,7 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
define = CMakePackage.define
|
||||
from_variant = self.define_from_variant
|
||||
|
||||
python = spec['python']
|
||||
python = spec["python"]
|
||||
cmake_args = [
|
||||
define("LLVM_REQUIRES_RTTI", True),
|
||||
define("LLVM_ENABLE_RTTI", True),
|
||||
@@ -544,14 +557,13 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
define("LIBOMP_HWLOC_INSTALL_DIR", spec["hwloc"].prefix),
|
||||
]
|
||||
|
||||
version_suffix = spec.variants['version_suffix'].value
|
||||
if version_suffix != 'none':
|
||||
cmake_args.append(define('LLVM_VERSION_SUFFIX', version_suffix))
|
||||
version_suffix = spec.variants["version_suffix"].value
|
||||
if version_suffix != "none":
|
||||
cmake_args.append(define("LLVM_VERSION_SUFFIX", version_suffix))
|
||||
|
||||
shlib_symbol_version = spec.variants.get('shlib_symbol_version', None)
|
||||
if shlib_symbol_version is not None and shlib_symbol_version.value != 'none':
|
||||
cmake_args.append(define('LLVM_SHLIB_SYMBOL_VERSION',
|
||||
shlib_symbol_version.value))
|
||||
shlib_symbol_version = spec.variants.get("shlib_symbol_version", None)
|
||||
if shlib_symbol_version is not None and shlib_symbol_version.value != "none":
|
||||
cmake_args.append(define("LLVM_SHLIB_SYMBOL_VERSION", shlib_symbol_version.value))
|
||||
|
||||
if python.version >= Version("3"):
|
||||
cmake_args.append(define("Python3_EXECUTABLE", python.command.path))
|
||||
@@ -562,47 +574,56 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
runtimes = []
|
||||
|
||||
if "+cuda" in spec:
|
||||
cmake_args.extend([
|
||||
define("CUDA_TOOLKIT_ROOT_DIR", spec["cuda"].prefix),
|
||||
define("LIBOMPTARGET_NVPTX_COMPUTE_CAPABILITIES",
|
||||
",".join(spec.variants["cuda_arch"].value)),
|
||||
define("CLANG_OPENMP_NVPTX_DEFAULT_ARCH",
|
||||
"sm_{0}".format(spec.variants["cuda_arch"].value[-1])),
|
||||
])
|
||||
cmake_args.extend(
|
||||
[
|
||||
define("CUDA_TOOLKIT_ROOT_DIR", spec["cuda"].prefix),
|
||||
define(
|
||||
"LIBOMPTARGET_NVPTX_COMPUTE_CAPABILITIES",
|
||||
",".join(spec.variants["cuda_arch"].value),
|
||||
),
|
||||
define(
|
||||
"CLANG_OPENMP_NVPTX_DEFAULT_ARCH",
|
||||
"sm_{0}".format(spec.variants["cuda_arch"].value[-1]),
|
||||
),
|
||||
]
|
||||
)
|
||||
if "+omp_as_runtime" in spec:
|
||||
cmake_args.extend([
|
||||
define("LIBOMPTARGET_NVPTX_ENABLE_BCLIB", True),
|
||||
# work around bad libelf detection in libomptarget
|
||||
define("LIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR",
|
||||
spec["libelf"].prefix.include),
|
||||
])
|
||||
cmake_args.extend(
|
||||
[
|
||||
define("LIBOMPTARGET_NVPTX_ENABLE_BCLIB", True),
|
||||
# work around bad libelf detection in libomptarget
|
||||
define(
|
||||
"LIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR", spec["libelf"].prefix.include
|
||||
),
|
||||
]
|
||||
)
|
||||
else:
|
||||
# still build libomptarget but disable cuda
|
||||
cmake_args.extend([
|
||||
define("CUDA_TOOLKIT_ROOT_DIR", "IGNORE"),
|
||||
define("CUDA_SDK_ROOT_DIR", "IGNORE"),
|
||||
define("CUDA_NVCC_EXECUTABLE", "IGNORE"),
|
||||
define("LIBOMPTARGET_DEP_CUDA_DRIVER_LIBRARIES", "IGNORE"),
|
||||
])
|
||||
cmake_args.extend(
|
||||
[
|
||||
define("CUDA_TOOLKIT_ROOT_DIR", "IGNORE"),
|
||||
define("CUDA_SDK_ROOT_DIR", "IGNORE"),
|
||||
define("CUDA_NVCC_EXECUTABLE", "IGNORE"),
|
||||
define("LIBOMPTARGET_DEP_CUDA_DRIVER_LIBRARIES", "IGNORE"),
|
||||
]
|
||||
)
|
||||
|
||||
cmake_args.append(from_variant("LIBOMPTARGET_ENABLE_DEBUG", "omp_debug"))
|
||||
|
||||
if "+lldb" in spec:
|
||||
projects.append("lldb")
|
||||
cmake_args.append(define('LLDB_ENABLE_LIBEDIT', True))
|
||||
cmake_args.append(define('LLDB_ENABLE_NCURSES', True))
|
||||
cmake_args.append(define('LLDB_ENABLE_LIBXML2', False))
|
||||
if spec.version >= Version('10'):
|
||||
cmake_args.append(from_variant("LLDB_ENABLE_PYTHON", 'python'))
|
||||
cmake_args.append(define("LLDB_ENABLE_LIBEDIT", True))
|
||||
cmake_args.append(define("LLDB_ENABLE_NCURSES", True))
|
||||
cmake_args.append(define("LLDB_ENABLE_LIBXML2", False))
|
||||
if spec.version >= Version("10"):
|
||||
cmake_args.append(from_variant("LLDB_ENABLE_PYTHON", "python"))
|
||||
else:
|
||||
cmake_args.append(define("LLDB_DISABLE_PYTHON", '~python' in spec))
|
||||
cmake_args.append(define("LLDB_DISABLE_PYTHON", "~python" in spec))
|
||||
if spec.satisfies("@5.0.0: +python"):
|
||||
cmake_args.append(define("LLDB_USE_SYSTEM_SIX", True))
|
||||
|
||||
if "+gold" in spec:
|
||||
cmake_args.append(
|
||||
define("LLVM_BINUTILS_INCDIR", spec["binutils"].prefix.include)
|
||||
)
|
||||
cmake_args.append(define("LLVM_BINUTILS_INCDIR", spec["binutils"].prefix.include))
|
||||
|
||||
if "+clang" in spec:
|
||||
projects.append("clang")
|
||||
@@ -612,10 +633,10 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
else:
|
||||
projects.append("openmp")
|
||||
|
||||
if '@8' in spec:
|
||||
cmake_args.append(from_variant('CLANG_ANALYZER_ENABLE_Z3_SOLVER', 'z3'))
|
||||
elif '@9:' in spec:
|
||||
cmake_args.append(from_variant('LLVM_ENABLE_Z3_SOLVER', 'z3'))
|
||||
if "@8" in spec:
|
||||
cmake_args.append(from_variant("CLANG_ANALYZER_ENABLE_Z3_SOLVER", "z3"))
|
||||
elif "@9:" in spec:
|
||||
cmake_args.append(from_variant("LLVM_ENABLE_Z3_SOLVER", "z3"))
|
||||
|
||||
if "+flang" in spec:
|
||||
projects.append("flang")
|
||||
@@ -634,26 +655,26 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
projects.append("polly")
|
||||
cmake_args.append(define("LINK_POLLY_INTO_TOOLS", True))
|
||||
|
||||
cmake_args.extend([
|
||||
define("BUILD_SHARED_LIBS", False),
|
||||
from_variant("LLVM_BUILD_LLVM_DYLIB", "llvm_dylib"),
|
||||
from_variant("LLVM_LINK_LLVM_DYLIB", "link_llvm_dylib"),
|
||||
from_variant("LLVM_USE_SPLIT_DWARF", "split_dwarf"),
|
||||
# By default on Linux, libc++.so is a ldscript. CMake fails to add
|
||||
# CMAKE_INSTALL_RPATH to it, which fails. Statically link libc++abi.a
|
||||
# into libc++.so, linking with -lc++ or -stdlib=libc++ is enough.
|
||||
define('LIBCXX_ENABLE_STATIC_ABI_LIBRARY', True)
|
||||
])
|
||||
cmake_args.extend(
|
||||
[
|
||||
define("BUILD_SHARED_LIBS", False),
|
||||
from_variant("LLVM_BUILD_LLVM_DYLIB", "llvm_dylib"),
|
||||
from_variant("LLVM_LINK_LLVM_DYLIB", "link_llvm_dylib"),
|
||||
from_variant("LLVM_USE_SPLIT_DWARF", "split_dwarf"),
|
||||
# By default on Linux, libc++.so is a ldscript. CMake fails to add
|
||||
# CMAKE_INSTALL_RPATH to it, which fails. Statically link libc++abi.a
|
||||
# into libc++.so, linking with -lc++ or -stdlib=libc++ is enough.
|
||||
define("LIBCXX_ENABLE_STATIC_ABI_LIBRARY", True),
|
||||
]
|
||||
)
|
||||
|
||||
cmake_args.append(define(
|
||||
"LLVM_TARGETS_TO_BUILD",
|
||||
get_llvm_targets_to_build(spec)))
|
||||
cmake_args.append(define("LLVM_TARGETS_TO_BUILD", get_llvm_targets_to_build(spec)))
|
||||
|
||||
cmake_args.append(from_variant("LIBOMP_TSAN_SUPPORT", "omp_tsan"))
|
||||
|
||||
if self.compiler.name == "gcc":
|
||||
compiler = Executable(self.compiler.cc)
|
||||
gcc_output = compiler('-print-search-dirs', output=str, error=str)
|
||||
gcc_output = compiler("-print-search-dirs", output=str, error=str)
|
||||
|
||||
for line in gcc_output.splitlines():
|
||||
if line.startswith("install:"):
|
||||
@@ -665,7 +686,7 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
cmake_args.append(define("GCC_INSTALL_PREFIX", gcc_prefix))
|
||||
|
||||
if self.spec.satisfies("~code_signing platform=darwin"):
|
||||
cmake_args.append(define('LLDB_USE_SYSTEM_DEBUGSERVER', True))
|
||||
cmake_args.append(define("LLDB_USE_SYSTEM_DEBUGSERVER", True))
|
||||
|
||||
# Semicolon seperated list of projects to enable
|
||||
cmake_args.append(define("LLVM_ENABLE_PROJECTS", projects))
|
||||
@@ -689,20 +710,24 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
# rebuild libomptarget to get bytecode runtime library files
|
||||
with working_dir(ompdir, create=True):
|
||||
cmake_args = [
|
||||
'-G', 'Ninja',
|
||||
define('CMAKE_BUILD_TYPE', spec.variants['build_type'].value),
|
||||
"-G",
|
||||
"Ninja",
|
||||
define("CMAKE_BUILD_TYPE", spec.variants["build_type"].value),
|
||||
define("CMAKE_C_COMPILER", spec.prefix.bin + "/clang"),
|
||||
define("CMAKE_CXX_COMPILER", spec.prefix.bin + "/clang++"),
|
||||
define("CMAKE_INSTALL_PREFIX", spec.prefix),
|
||||
define('CMAKE_PREFIX_PATH', prefix_paths)
|
||||
define("CMAKE_PREFIX_PATH", prefix_paths),
|
||||
]
|
||||
cmake_args.extend(self.cmake_args())
|
||||
cmake_args.extend([
|
||||
define("LIBOMPTARGET_NVPTX_ENABLE_BCLIB", True),
|
||||
define("LIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR",
|
||||
spec["libelf"].prefix.include),
|
||||
self.stage.source_path + "/openmp",
|
||||
])
|
||||
cmake_args.extend(
|
||||
[
|
||||
define("LIBOMPTARGET_NVPTX_ENABLE_BCLIB", True),
|
||||
define(
|
||||
"LIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR", spec["libelf"].prefix.include
|
||||
),
|
||||
self.stage.source_path + "/openmp",
|
||||
]
|
||||
)
|
||||
|
||||
cmake(*cmake_args)
|
||||
ninja()
|
||||
@@ -717,22 +742,22 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
install_tree("bin", join_path(self.prefix, "libexec", "llvm"))
|
||||
|
||||
def llvm_config(self, *args, **kwargs):
|
||||
lc = Executable(self.prefix.bin.join('llvm-config'))
|
||||
if not kwargs.get('output'):
|
||||
kwargs['output'] = str
|
||||
lc = Executable(self.prefix.bin.join("llvm-config"))
|
||||
if not kwargs.get("output"):
|
||||
kwargs["output"] = str
|
||||
ret = lc(*args, **kwargs)
|
||||
if kwargs.get('result') == "list":
|
||||
if kwargs.get("result") == "list":
|
||||
return ret.split()
|
||||
else:
|
||||
return ret
|
||||
|
||||
|
||||
def get_llvm_targets_to_build(spec):
|
||||
targets = spec.variants['targets'].value
|
||||
targets = spec.variants["targets"].value
|
||||
|
||||
# Build everything?
|
||||
if 'all' in targets:
|
||||
return 'all'
|
||||
if "all" in targets:
|
||||
return "all"
|
||||
|
||||
# Convert targets variant values to CMake LLVM_TARGETS_TO_BUILD array.
|
||||
spack_to_cmake = {
|
||||
@@ -753,10 +778,10 @@ def get_llvm_targets_to_build(spec):
|
||||
"systemz": "SystemZ",
|
||||
"webassembly": "WebAssembly",
|
||||
"x86": "X86",
|
||||
"xcore": "XCore"
|
||||
"xcore": "XCore",
|
||||
}
|
||||
|
||||
if 'none' in targets:
|
||||
if "none" in targets:
|
||||
llvm_targets = set()
|
||||
else:
|
||||
llvm_targets = set(spack_to_cmake[target] for target in targets)
|
||||
|
||||
@@ -22,127 +22,140 @@ class PyTorch(PythonPackage, CudaPackage):
|
||||
with strong GPU acceleration."""
|
||||
|
||||
homepage = "https://pytorch.org/"
|
||||
git = "https://github.com/pytorch/pytorch.git"
|
||||
git = "https://github.com/pytorch/pytorch.git"
|
||||
|
||||
maintainers = ['adamjstewart']
|
||||
maintainers("adamjstewart")
|
||||
|
||||
# Exact set of modules is version- and variant-specific, just attempt to import the
|
||||
# core libraries to ensure that the package was successfully installed.
|
||||
import_modules = ['torch', 'torch.autograd', 'torch.nn', 'torch.utils']
|
||||
import_modules = ["torch", "torch.autograd", "torch.nn", "torch.utils"]
|
||||
|
||||
version('master', branch='master', submodules=True)
|
||||
version('1.10.1', tag='v1.10.1', submodules=True)
|
||||
version('1.10.0', tag='v1.10.0', submodules=True)
|
||||
version('1.9.1', tag='v1.9.1', submodules=True)
|
||||
version('1.9.0', tag='v1.9.0', submodules=True)
|
||||
version('1.8.2', tag='v1.8.2', submodules=True)
|
||||
version('1.8.1', tag='v1.8.1', submodules=True)
|
||||
version('1.8.0', tag='v1.8.0', submodules=True)
|
||||
version('1.7.1', tag='v1.7.1', submodules=True)
|
||||
version('1.7.0', tag='v1.7.0', submodules=True)
|
||||
version('1.6.0', tag='v1.6.0', submodules=True)
|
||||
version('1.5.1', tag='v1.5.1', submodules=True)
|
||||
version('1.5.0', tag='v1.5.0', submodules=True)
|
||||
version('1.4.1', tag='v1.4.1', submodules=True)
|
||||
version('1.4.0', tag='v1.4.0', submodules=True, deprecated=True,
|
||||
submodules_delete=['third_party/fbgemm'])
|
||||
version('1.3.1', tag='v1.3.1', submodules=True)
|
||||
version('1.3.0', tag='v1.3.0', submodules=True)
|
||||
version('1.2.0', tag='v1.2.0', submodules=True)
|
||||
version('1.1.0', tag='v1.1.0', submodules=True)
|
||||
version('1.0.1', tag='v1.0.1', submodules=True)
|
||||
version('1.0.0', tag='v1.0.0', submodules=True)
|
||||
version('0.4.1', tag='v0.4.1', submodules=True, deprecated=True,
|
||||
submodules_delete=['third_party/nervanagpu'])
|
||||
version('0.4.0', tag='v0.4.0', submodules=True, deprecated=True)
|
||||
version('0.3.1', tag='v0.3.1', submodules=True, deprecated=True)
|
||||
version("master", branch="master", submodules=True)
|
||||
version("1.10.1", tag="v1.10.1", submodules=True)
|
||||
version("1.10.0", tag="v1.10.0", submodules=True)
|
||||
version("1.9.1", tag="v1.9.1", submodules=True)
|
||||
version("1.9.0", tag="v1.9.0", submodules=True)
|
||||
version("1.8.2", tag="v1.8.2", submodules=True)
|
||||
version("1.8.1", tag="v1.8.1", submodules=True)
|
||||
version("1.8.0", tag="v1.8.0", submodules=True)
|
||||
version("1.7.1", tag="v1.7.1", submodules=True)
|
||||
version("1.7.0", tag="v1.7.0", submodules=True)
|
||||
version("1.6.0", tag="v1.6.0", submodules=True)
|
||||
version("1.5.1", tag="v1.5.1", submodules=True)
|
||||
version("1.5.0", tag="v1.5.0", submodules=True)
|
||||
version("1.4.1", tag="v1.4.1", submodules=True)
|
||||
version(
|
||||
"1.4.0",
|
||||
tag="v1.4.0",
|
||||
submodules=True,
|
||||
deprecated=True,
|
||||
submodules_delete=["third_party/fbgemm"],
|
||||
)
|
||||
version("1.3.1", tag="v1.3.1", submodules=True)
|
||||
version("1.3.0", tag="v1.3.0", submodules=True)
|
||||
version("1.2.0", tag="v1.2.0", submodules=True)
|
||||
version("1.1.0", tag="v1.1.0", submodules=True)
|
||||
version("1.0.1", tag="v1.0.1", submodules=True)
|
||||
version("1.0.0", tag="v1.0.0", submodules=True)
|
||||
version(
|
||||
"0.4.1",
|
||||
tag="v0.4.1",
|
||||
submodules=True,
|
||||
deprecated=True,
|
||||
submodules_delete=["third_party/nervanagpu"],
|
||||
)
|
||||
version("0.4.0", tag="v0.4.0", submodules=True, deprecated=True)
|
||||
version("0.3.1", tag="v0.3.1", submodules=True, deprecated=True)
|
||||
|
||||
is_darwin = sys.platform == 'darwin'
|
||||
is_darwin = sys.platform == "darwin"
|
||||
|
||||
# All options are defined in CMakeLists.txt.
|
||||
# Some are listed in setup.py, but not all.
|
||||
variant('caffe2', default=True, description='Build Caffe2')
|
||||
variant('test', default=False, description='Build C++ test binaries')
|
||||
variant('cuda', default=not is_darwin, description='Use CUDA')
|
||||
variant('rocm', default=False, description='Use ROCm')
|
||||
variant('cudnn', default=not is_darwin, description='Use cuDNN')
|
||||
variant('fbgemm', default=True, description='Use FBGEMM (quantized 8-bit server operators)')
|
||||
variant('kineto', default=True, description='Use Kineto profiling library')
|
||||
variant('magma', default=not is_darwin, description='Use MAGMA')
|
||||
variant('metal', default=is_darwin, description='Use Metal for Caffe2 iOS build')
|
||||
variant('nccl', default=not is_darwin, description='Use NCCL')
|
||||
variant('nnpack', default=True, description='Use NNPACK')
|
||||
variant('numa', default=not is_darwin, description='Use NUMA')
|
||||
variant('numpy', default=True, description='Use NumPy')
|
||||
variant('openmp', default=True, description='Use OpenMP for parallel code')
|
||||
variant('qnnpack', default=True, description='Use QNNPACK (quantized 8-bit operators)')
|
||||
variant('valgrind', default=not is_darwin, description='Use Valgrind')
|
||||
variant('xnnpack', default=True, description='Use XNNPACK')
|
||||
variant('mkldnn', default=True, description='Use MKLDNN')
|
||||
variant('distributed', default=not is_darwin, description='Use distributed')
|
||||
variant('mpi', default=not is_darwin, description='Use MPI for Caffe2')
|
||||
variant('gloo', default=not is_darwin, description='Use Gloo')
|
||||
variant('tensorpipe', default=not is_darwin, description='Use TensorPipe')
|
||||
variant('onnx_ml', default=True, description='Enable traditional ONNX ML API')
|
||||
variant('breakpad', default=True, description='Enable breakpad crash dump library')
|
||||
variant("caffe2", default=True, description="Build Caffe2")
|
||||
variant("test", default=False, description="Build C++ test binaries")
|
||||
variant("cuda", default=not is_darwin, description="Use CUDA")
|
||||
variant("rocm", default=False, description="Use ROCm")
|
||||
variant("cudnn", default=not is_darwin, description="Use cuDNN")
|
||||
variant("fbgemm", default=True, description="Use FBGEMM (quantized 8-bit server operators)")
|
||||
variant("kineto", default=True, description="Use Kineto profiling library")
|
||||
variant("magma", default=not is_darwin, description="Use MAGMA")
|
||||
variant("metal", default=is_darwin, description="Use Metal for Caffe2 iOS build")
|
||||
variant("nccl", default=not is_darwin, description="Use NCCL")
|
||||
variant("nnpack", default=True, description="Use NNPACK")
|
||||
variant("numa", default=not is_darwin, description="Use NUMA")
|
||||
variant("numpy", default=True, description="Use NumPy")
|
||||
variant("openmp", default=True, description="Use OpenMP for parallel code")
|
||||
variant("qnnpack", default=True, description="Use QNNPACK (quantized 8-bit operators)")
|
||||
variant("valgrind", default=not is_darwin, description="Use Valgrind")
|
||||
variant("xnnpack", default=True, description="Use XNNPACK")
|
||||
variant("mkldnn", default=True, description="Use MKLDNN")
|
||||
variant("distributed", default=not is_darwin, description="Use distributed")
|
||||
variant("mpi", default=not is_darwin, description="Use MPI for Caffe2")
|
||||
variant("gloo", default=not is_darwin, description="Use Gloo")
|
||||
variant("tensorpipe", default=not is_darwin, description="Use TensorPipe")
|
||||
variant("onnx_ml", default=True, description="Enable traditional ONNX ML API")
|
||||
variant("breakpad", default=True, description="Enable breakpad crash dump library")
|
||||
|
||||
conflicts('+cuda', when='+rocm')
|
||||
conflicts('+cudnn', when='~cuda')
|
||||
conflicts('+magma', when='~cuda')
|
||||
conflicts('+nccl', when='~cuda~rocm')
|
||||
conflicts('+nccl', when='platform=darwin')
|
||||
conflicts('+numa', when='platform=darwin', msg='Only available on Linux')
|
||||
conflicts('+valgrind', when='platform=darwin', msg='Only available on Linux')
|
||||
conflicts('+mpi', when='~distributed')
|
||||
conflicts('+gloo', when='~distributed')
|
||||
conflicts('+tensorpipe', when='~distributed')
|
||||
conflicts('+kineto', when='@:1.7')
|
||||
conflicts('+valgrind', when='@:1.7')
|
||||
conflicts('~caffe2', when='@0.4.0:1.6') # no way to disable caffe2?
|
||||
conflicts('+caffe2', when='@:0.3.1') # caffe2 did not yet exist?
|
||||
conflicts('+tensorpipe', when='@:1.5')
|
||||
conflicts('+xnnpack', when='@:1.4')
|
||||
conflicts('~onnx_ml', when='@:1.4') # no way to disable ONNX?
|
||||
conflicts('+rocm', when='@:0.4')
|
||||
conflicts('+cudnn', when='@:0.4')
|
||||
conflicts('+fbgemm', when='@:0.4,1.4.0')
|
||||
conflicts('+qnnpack', when='@:0.4')
|
||||
conflicts('+mkldnn', when='@:0.4')
|
||||
conflicts('+breakpad', when='@:1.9') # Option appeared in 1.10.0
|
||||
conflicts('+breakpad', when='target=ppc64:', msg='Unsupported')
|
||||
conflicts('+breakpad', when='target=ppc64le:', msg='Unsupported')
|
||||
conflicts("+cuda", when="+rocm")
|
||||
conflicts("+cudnn", when="~cuda")
|
||||
conflicts("+magma", when="~cuda")
|
||||
conflicts("+nccl", when="~cuda~rocm")
|
||||
conflicts("+nccl", when="platform=darwin")
|
||||
conflicts("+numa", when="platform=darwin", msg="Only available on Linux")
|
||||
conflicts("+valgrind", when="platform=darwin", msg="Only available on Linux")
|
||||
conflicts("+mpi", when="~distributed")
|
||||
conflicts("+gloo", when="~distributed")
|
||||
conflicts("+tensorpipe", when="~distributed")
|
||||
conflicts("+kineto", when="@:1.7")
|
||||
conflicts("+valgrind", when="@:1.7")
|
||||
conflicts("~caffe2", when="@0.4.0:1.6") # no way to disable caffe2?
|
||||
conflicts("+caffe2", when="@:0.3.1") # caffe2 did not yet exist?
|
||||
conflicts("+tensorpipe", when="@:1.5")
|
||||
conflicts("+xnnpack", when="@:1.4")
|
||||
conflicts("~onnx_ml", when="@:1.4") # no way to disable ONNX?
|
||||
conflicts("+rocm", when="@:0.4")
|
||||
conflicts("+cudnn", when="@:0.4")
|
||||
conflicts("+fbgemm", when="@:0.4,1.4.0")
|
||||
conflicts("+qnnpack", when="@:0.4")
|
||||
conflicts("+mkldnn", when="@:0.4")
|
||||
conflicts("+breakpad", when="@:1.9") # Option appeared in 1.10.0
|
||||
conflicts("+breakpad", when="target=ppc64:", msg="Unsupported")
|
||||
conflicts("+breakpad", when="target=ppc64le:", msg="Unsupported")
|
||||
|
||||
conflicts('cuda_arch=none', when='+cuda',
|
||||
msg='Must specify CUDA compute capabilities of your GPU, see '
|
||||
'https://developer.nvidia.com/cuda-gpus')
|
||||
conflicts(
|
||||
"cuda_arch=none",
|
||||
when="+cuda",
|
||||
msg="Must specify CUDA compute capabilities of your GPU, see "
|
||||
"https://developer.nvidia.com/cuda-gpus",
|
||||
)
|
||||
|
||||
# Required dependencies
|
||||
depends_on('cmake@3.5:', type='build')
|
||||
depends_on("cmake@3.5:", type="build")
|
||||
# Use Ninja generator to speed up build times, automatically used if found
|
||||
depends_on('ninja@1.5:', when='@1.1.0:', type='build')
|
||||
depends_on("ninja@1.5:", when="@1.1.0:", type="build")
|
||||
# See python_min_version in setup.py
|
||||
depends_on('python@3.6.2:', when='@1.7.1:', type=('build', 'link', 'run'))
|
||||
depends_on('python@3.6.1:', when='@1.6.0:1.7.0', type=('build', 'link', 'run'))
|
||||
depends_on('python@3.5:', when='@1.5.0:1.5', type=('build', 'link', 'run'))
|
||||
depends_on('python@2.7:2.8,3.5:', when='@1.4.0:1.4', type=('build', 'link', 'run'))
|
||||
depends_on('python@2.7:2.8,3.5:3.7', when='@:1.3', type=('build', 'link', 'run'))
|
||||
depends_on('py-setuptools', type=('build', 'run'))
|
||||
depends_on('py-future', when='@1.5:', type=('build', 'run'))
|
||||
depends_on('py-future', when='@1.1: ^python@:2', type=('build', 'run'))
|
||||
depends_on('py-pyyaml', type=('build', 'run'))
|
||||
depends_on('py-typing', when='@0.4: ^python@:3.4', type=('build', 'run'))
|
||||
depends_on('py-typing-extensions', when='@1.7:', type=('build', 'run'))
|
||||
depends_on('py-pybind11@2.6.2', when='@1.8.0:', type=('build', 'link', 'run'))
|
||||
depends_on('py-pybind11@2.3.0', when='@1.1.0:1.7', type=('build', 'link', 'run'))
|
||||
depends_on('py-pybind11@2.2.4', when='@1.0.0:1.0', type=('build', 'link', 'run'))
|
||||
depends_on('py-pybind11@2.2.2', when='@0.4.0:0.4', type=('build', 'link', 'run'))
|
||||
depends_on('py-dataclasses', when='@1.7: ^python@3.6.0:3.6', type=('build', 'run'))
|
||||
depends_on('py-tqdm', type='run')
|
||||
depends_on('py-protobuf', when='@0.4:', type=('build', 'run'))
|
||||
depends_on('protobuf', when='@0.4:')
|
||||
depends_on('blas')
|
||||
depends_on('lapack')
|
||||
depends_on('eigen', when='@0.4:')
|
||||
depends_on("python@3.6.2:", when="@1.7.1:", type=("build", "link", "run"))
|
||||
depends_on("python@3.6.1:", when="@1.6.0:1.7.0", type=("build", "link", "run"))
|
||||
depends_on("python@3.5:", when="@1.5.0:1.5", type=("build", "link", "run"))
|
||||
depends_on("python@2.7:2.8,3.5:", when="@1.4.0:1.4", type=("build", "link", "run"))
|
||||
depends_on("python@2.7:2.8,3.5:3.7", when="@:1.3", type=("build", "link", "run"))
|
||||
depends_on("py-setuptools", type=("build", "run"))
|
||||
depends_on("py-future", when="@1.5:", type=("build", "run"))
|
||||
depends_on("py-future", when="@1.1: ^python@:2", type=("build", "run"))
|
||||
depends_on("py-pyyaml", type=("build", "run"))
|
||||
depends_on("py-typing", when="@0.4: ^python@:3.4", type=("build", "run"))
|
||||
depends_on("py-typing-extensions", when="@1.7:", type=("build", "run"))
|
||||
depends_on("py-pybind11@2.6.2", when="@1.8.0:", type=("build", "link", "run"))
|
||||
depends_on("py-pybind11@2.3.0", when="@1.1.0:1.7", type=("build", "link", "run"))
|
||||
depends_on("py-pybind11@2.2.4", when="@1.0.0:1.0", type=("build", "link", "run"))
|
||||
depends_on("py-pybind11@2.2.2", when="@0.4.0:0.4", type=("build", "link", "run"))
|
||||
depends_on("py-dataclasses", when="@1.7: ^python@3.6.0:3.6", type=("build", "run"))
|
||||
depends_on("py-tqdm", type="run")
|
||||
depends_on("py-protobuf", when="@0.4:", type=("build", "run"))
|
||||
depends_on("protobuf", when="@0.4:")
|
||||
depends_on("blas")
|
||||
depends_on("lapack")
|
||||
depends_on("eigen", when="@0.4:")
|
||||
# https://github.com/pytorch/pytorch/issues/60329
|
||||
# depends_on('cpuinfo@2020-12-17', when='@1.8.0:')
|
||||
# depends_on('cpuinfo@2020-06-11', when='@1.6.0:1.7')
|
||||
@@ -152,30 +165,30 @@ class PyTorch(PythonPackage, CudaPackage):
|
||||
# depends_on('sleef@3.4.0_2019-07-30', when='@1.6.0:1.7')
|
||||
# https://github.com/Maratyszcza/FP16/issues/18
|
||||
# depends_on('fp16@2020-05-14', when='@1.6.0:')
|
||||
depends_on('pthreadpool@2021-04-13', when='@1.9.0:')
|
||||
depends_on('pthreadpool@2020-10-05', when='@1.8.0:1.8')
|
||||
depends_on('pthreadpool@2020-06-15', when='@1.6.0:1.7')
|
||||
depends_on('psimd@2020-05-17', when='@1.6.0:')
|
||||
depends_on('fxdiv@2020-04-17', when='@1.6.0:')
|
||||
depends_on('benchmark', when='@1.6:+test')
|
||||
depends_on("pthreadpool@2021-04-13", when="@1.9.0:")
|
||||
depends_on("pthreadpool@2020-10-05", when="@1.8.0:1.8")
|
||||
depends_on("pthreadpool@2020-06-15", when="@1.6.0:1.7")
|
||||
depends_on("psimd@2020-05-17", when="@1.6.0:")
|
||||
depends_on("fxdiv@2020-04-17", when="@1.6.0:")
|
||||
depends_on("benchmark", when="@1.6:+test")
|
||||
|
||||
# Optional dependencies
|
||||
depends_on('cuda@7.5:', when='+cuda', type=('build', 'link', 'run'))
|
||||
depends_on('cuda@9:', when='@1.1:+cuda', type=('build', 'link', 'run'))
|
||||
depends_on('cuda@9.2:', when='@1.6:+cuda', type=('build', 'link', 'run'))
|
||||
depends_on('cudnn@6.0:7', when='@:1.0+cudnn')
|
||||
depends_on('cudnn@7.0:7', when='@1.1.0:1.5+cudnn')
|
||||
depends_on('cudnn@7.0:', when='@1.6.0:+cudnn')
|
||||
depends_on('magma', when='+magma')
|
||||
depends_on('nccl', when='+nccl')
|
||||
depends_on('numactl', when='+numa')
|
||||
depends_on('py-numpy', when='+numpy', type=('build', 'run'))
|
||||
depends_on('llvm-openmp', when='%apple-clang +openmp')
|
||||
depends_on('valgrind', when='+valgrind')
|
||||
depends_on("cuda@7.5:", when="+cuda", type=("build", "link", "run"))
|
||||
depends_on("cuda@9:", when="@1.1:+cuda", type=("build", "link", "run"))
|
||||
depends_on("cuda@9.2:", when="@1.6:+cuda", type=("build", "link", "run"))
|
||||
depends_on("cudnn@6.0:7", when="@:1.0+cudnn")
|
||||
depends_on("cudnn@7.0:7", when="@1.1.0:1.5+cudnn")
|
||||
depends_on("cudnn@7.0:", when="@1.6.0:+cudnn")
|
||||
depends_on("magma", when="+magma")
|
||||
depends_on("nccl", when="+nccl")
|
||||
depends_on("numactl", when="+numa")
|
||||
depends_on("py-numpy", when="+numpy", type=("build", "run"))
|
||||
depends_on("llvm-openmp", when="%apple-clang +openmp")
|
||||
depends_on("valgrind", when="+valgrind")
|
||||
# https://github.com/pytorch/pytorch/issues/60332
|
||||
# depends_on('xnnpack@2021-02-22', when='@1.8.0:+xnnpack')
|
||||
# depends_on('xnnpack@2020-03-23', when='@1.6.0:1.7+xnnpack')
|
||||
depends_on('mpi', when='+mpi')
|
||||
depends_on("mpi", when="+mpi")
|
||||
# https://github.com/pytorch/pytorch/issues/60270
|
||||
# depends_on('gloo@2021-05-04', when='@1.9.0:+gloo')
|
||||
# depends_on('gloo@2020-09-18', when='@1.7.0:1.8+gloo')
|
||||
@@ -183,31 +196,35 @@ class PyTorch(PythonPackage, CudaPackage):
|
||||
# https://github.com/pytorch/pytorch/issues/60331
|
||||
# depends_on('onnx@1.8.0_2020-11-03', when='@1.8.0:+onnx_ml')
|
||||
# depends_on('onnx@1.7.0_2020-05-31', when='@1.6.0:1.7+onnx_ml')
|
||||
depends_on('mkl', when='+mkldnn')
|
||||
depends_on("mkl", when="+mkldnn")
|
||||
|
||||
# Test dependencies
|
||||
depends_on('py-hypothesis', type='test')
|
||||
depends_on('py-six', type='test')
|
||||
depends_on('py-psutil', type='test')
|
||||
depends_on("py-hypothesis", type="test")
|
||||
depends_on("py-six", type="test")
|
||||
depends_on("py-psutil", type="test")
|
||||
|
||||
# Fix BLAS being overridden by MKL
|
||||
# https://github.com/pytorch/pytorch/issues/60328
|
||||
patch('https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/59220.patch',
|
||||
sha256='e37afffe45cf7594c22050109942370e49983ad772d12ebccf508377dc9dcfc9',
|
||||
when='@1.2.0:')
|
||||
patch(
|
||||
"https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/59220.patch",
|
||||
sha256="e37afffe45cf7594c22050109942370e49983ad772d12ebccf508377dc9dcfc9",
|
||||
when="@1.2.0:",
|
||||
)
|
||||
|
||||
# Fixes build on older systems with glibc <2.12
|
||||
patch('https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/55063.patch',
|
||||
sha256='e17eaa42f5d7c18bf0d7c37d7b0910127a01ad53fdce3e226a92893356a70395',
|
||||
when='@1.1.0:1.8.1')
|
||||
patch(
|
||||
"https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/55063.patch",
|
||||
sha256="e17eaa42f5d7c18bf0d7c37d7b0910127a01ad53fdce3e226a92893356a70395",
|
||||
when="@1.1.0:1.8.1",
|
||||
)
|
||||
|
||||
# Fixes CMake configuration error when XNNPACK is disabled
|
||||
# https://github.com/pytorch/pytorch/pull/35607
|
||||
# https://github.com/pytorch/pytorch/pull/37865
|
||||
patch('xnnpack.patch', when='@1.5.0:1.5')
|
||||
patch("xnnpack.patch", when="@1.5.0:1.5")
|
||||
|
||||
# Fixes build error when ROCm is enabled for pytorch-1.5 release
|
||||
patch('rocm.patch', when='@1.5.0:1.5+rocm')
|
||||
patch("rocm.patch", when="@1.5.0:1.5+rocm")
|
||||
|
||||
# Fixes fatal error: sleef.h: No such file or directory
|
||||
# https://github.com/pytorch/pytorch/pull/35359
|
||||
@@ -216,47 +233,56 @@ class PyTorch(PythonPackage, CudaPackage):
|
||||
|
||||
# Fixes compilation with Clang 9.0.0 and Apple Clang 11.0.3
|
||||
# https://github.com/pytorch/pytorch/pull/37086
|
||||
patch('https://github.com/pytorch/pytorch/commit/e921cd222a8fbeabf5a3e74e83e0d8dfb01aa8b5.patch',
|
||||
sha256='17561b16cd2db22f10c0fe1fdcb428aecb0ac3964ba022a41343a6bb8cba7049',
|
||||
when='@1.1:1.5')
|
||||
patch(
|
||||
"https://github.com/pytorch/pytorch/commit/e921cd222a8fbeabf5a3e74e83e0d8dfb01aa8b5.patch",
|
||||
sha256="17561b16cd2db22f10c0fe1fdcb428aecb0ac3964ba022a41343a6bb8cba7049",
|
||||
when="@1.1:1.5",
|
||||
)
|
||||
|
||||
# Removes duplicate definition of getCusparseErrorString
|
||||
# https://github.com/pytorch/pytorch/issues/32083
|
||||
patch('cusparseGetErrorString.patch', when='@0.4.1:1.0^cuda@10.1.243:')
|
||||
patch("cusparseGetErrorString.patch", when="@0.4.1:1.0^cuda@10.1.243:")
|
||||
|
||||
# Fixes 'FindOpenMP.cmake'
|
||||
# to detect openmp settings used by Fujitsu compiler.
|
||||
patch('detect_omp_of_fujitsu_compiler.patch', when='%fj')
|
||||
patch("detect_omp_of_fujitsu_compiler.patch", when="%fj")
|
||||
|
||||
# Fix compilation of +distributed~tensorpipe
|
||||
# https://github.com/pytorch/pytorch/issues/68002
|
||||
patch('https://github.com/pytorch/pytorch/commit/c075f0f633fa0136e68f0a455b5b74d7b500865c.patch',
|
||||
sha256='e69e41b5c171bfb00d1b5d4ee55dd5e4c8975483230274af4ab461acd37e40b8', when='@1.10.0+distributed~tensorpipe')
|
||||
patch(
|
||||
"https://github.com/pytorch/pytorch/commit/c075f0f633fa0136e68f0a455b5b74d7b500865c.patch",
|
||||
sha256="e69e41b5c171bfb00d1b5d4ee55dd5e4c8975483230274af4ab461acd37e40b8",
|
||||
when="@1.10.0+distributed~tensorpipe",
|
||||
)
|
||||
|
||||
# Both build and install run cmake/make/make install
|
||||
# Only run once to speed up build times
|
||||
phases = ['install']
|
||||
phases = ["install"]
|
||||
|
||||
@property
|
||||
def libs(self):
|
||||
root = join_path(self.prefix, self.spec['python'].package.site_packages_dir,
|
||||
'torch', 'lib')
|
||||
return find_libraries('libtorch', root)
|
||||
root = join_path(
|
||||
self.prefix, self.spec["python"].package.site_packages_dir, "torch", "lib"
|
||||
)
|
||||
return find_libraries("libtorch", root)
|
||||
|
||||
@property
|
||||
def headers(self):
|
||||
root = join_path(self.prefix, self.spec['python'].package.site_packages_dir,
|
||||
'torch', 'include')
|
||||
root = join_path(
|
||||
self.prefix, self.spec["python"].package.site_packages_dir, "torch", "include"
|
||||
)
|
||||
headers = find_all_headers(root)
|
||||
headers.directories = [root]
|
||||
return headers
|
||||
|
||||
@when('@1.5.0:')
|
||||
@when("@1.5.0:")
|
||||
def patch(self):
|
||||
# https://github.com/pytorch/pytorch/issues/52208
|
||||
filter_file('torch_global_deps PROPERTIES LINKER_LANGUAGE C',
|
||||
'torch_global_deps PROPERTIES LINKER_LANGUAGE CXX',
|
||||
'caffe2/CMakeLists.txt')
|
||||
filter_file(
|
||||
"torch_global_deps PROPERTIES LINKER_LANGUAGE C",
|
||||
"torch_global_deps PROPERTIES LINKER_LANGUAGE CXX",
|
||||
"caffe2/CMakeLists.txt",
|
||||
)
|
||||
|
||||
def setup_build_environment(self, env):
|
||||
"""Set environment variables used to control the build.
|
||||
@@ -269,7 +295,8 @@ class PyTorch(PythonPackage, CudaPackage):
|
||||
most flags defined in ``CMakeLists.txt`` can be specified as
|
||||
environment variables.
|
||||
"""
|
||||
def enable_or_disable(variant, keyword='USE', var=None, newer=False):
|
||||
|
||||
def enable_or_disable(variant, keyword="USE", var=None, newer=False):
|
||||
"""Set environment variable to enable or disable support for a
|
||||
particular variant.
|
||||
|
||||
@@ -284,137 +311,135 @@ class PyTorch(PythonPackage, CudaPackage):
|
||||
|
||||
# Version 1.1.0 switched from NO_* to USE_* or BUILD_*
|
||||
# But some newer variants have always used USE_* or BUILD_*
|
||||
if self.spec.satisfies('@1.1:') or newer:
|
||||
if '+' + variant in self.spec:
|
||||
env.set(keyword + '_' + var, 'ON')
|
||||
if self.spec.satisfies("@1.1:") or newer:
|
||||
if "+" + variant in self.spec:
|
||||
env.set(keyword + "_" + var, "ON")
|
||||
else:
|
||||
env.set(keyword + '_' + var, 'OFF')
|
||||
env.set(keyword + "_" + var, "OFF")
|
||||
else:
|
||||
if '+' + variant in self.spec:
|
||||
env.unset('NO_' + var)
|
||||
if "+" + variant in self.spec:
|
||||
env.unset("NO_" + var)
|
||||
else:
|
||||
env.set('NO_' + var, 'ON')
|
||||
env.set("NO_" + var, "ON")
|
||||
|
||||
# Build in parallel to speed up build times
|
||||
env.set('MAX_JOBS', make_jobs)
|
||||
env.set("MAX_JOBS", make_jobs)
|
||||
|
||||
# Spack logs have trouble handling colored output
|
||||
env.set('COLORIZE_OUTPUT', 'OFF')
|
||||
env.set("COLORIZE_OUTPUT", "OFF")
|
||||
|
||||
if self.spec.satisfies('@0.4:'):
|
||||
enable_or_disable('test', keyword='BUILD')
|
||||
if self.spec.satisfies("@0.4:"):
|
||||
enable_or_disable("test", keyword="BUILD")
|
||||
|
||||
if self.spec.satisfies('@1.7:'):
|
||||
enable_or_disable('caffe2', keyword='BUILD')
|
||||
if self.spec.satisfies("@1.7:"):
|
||||
enable_or_disable("caffe2", keyword="BUILD")
|
||||
|
||||
enable_or_disable('cuda')
|
||||
if '+cuda' in self.spec:
|
||||
enable_or_disable("cuda")
|
||||
if "+cuda" in self.spec:
|
||||
# cmake/public/cuda.cmake
|
||||
# cmake/Modules_CUDA_fix/upstream/FindCUDA.cmake
|
||||
env.unset('CUDA_ROOT')
|
||||
torch_cuda_arch = ';'.join('{0:.1f}'.format(float(i) / 10.0) for i
|
||||
in
|
||||
self.spec.variants['cuda_arch'].value)
|
||||
env.set('TORCH_CUDA_ARCH_LIST', torch_cuda_arch)
|
||||
env.unset("CUDA_ROOT")
|
||||
torch_cuda_arch = ";".join(
|
||||
"{0:.1f}".format(float(i) / 10.0) for i in self.spec.variants["cuda_arch"].value
|
||||
)
|
||||
env.set("TORCH_CUDA_ARCH_LIST", torch_cuda_arch)
|
||||
|
||||
enable_or_disable('rocm')
|
||||
enable_or_disable("rocm")
|
||||
|
||||
enable_or_disable('cudnn')
|
||||
if '+cudnn' in self.spec:
|
||||
enable_or_disable("cudnn")
|
||||
if "+cudnn" in self.spec:
|
||||
# cmake/Modules_CUDA_fix/FindCUDNN.cmake
|
||||
env.set('CUDNN_INCLUDE_DIR', self.spec['cudnn'].prefix.include)
|
||||
env.set('CUDNN_LIBRARY', self.spec['cudnn'].libs[0])
|
||||
env.set("CUDNN_INCLUDE_DIR", self.spec["cudnn"].prefix.include)
|
||||
env.set("CUDNN_LIBRARY", self.spec["cudnn"].libs[0])
|
||||
|
||||
enable_or_disable('fbgemm')
|
||||
if self.spec.satisfies('@1.8:'):
|
||||
enable_or_disable('kineto')
|
||||
enable_or_disable('magma')
|
||||
enable_or_disable('metal')
|
||||
if self.spec.satisfies('@1.10:'):
|
||||
enable_or_disable('breakpad')
|
||||
enable_or_disable("fbgemm")
|
||||
if self.spec.satisfies("@1.8:"):
|
||||
enable_or_disable("kineto")
|
||||
enable_or_disable("magma")
|
||||
enable_or_disable("metal")
|
||||
if self.spec.satisfies("@1.10:"):
|
||||
enable_or_disable("breakpad")
|
||||
|
||||
enable_or_disable('nccl')
|
||||
if '+nccl' in self.spec:
|
||||
env.set('NCCL_LIB_DIR', self.spec['nccl'].libs.directories[0])
|
||||
env.set('NCCL_INCLUDE_DIR', self.spec['nccl'].prefix.include)
|
||||
enable_or_disable("nccl")
|
||||
if "+nccl" in self.spec:
|
||||
env.set("NCCL_LIB_DIR", self.spec["nccl"].libs.directories[0])
|
||||
env.set("NCCL_INCLUDE_DIR", self.spec["nccl"].prefix.include)
|
||||
|
||||
# cmake/External/nnpack.cmake
|
||||
enable_or_disable('nnpack')
|
||||
enable_or_disable("nnpack")
|
||||
|
||||
enable_or_disable('numa')
|
||||
if '+numa' in self.spec:
|
||||
enable_or_disable("numa")
|
||||
if "+numa" in self.spec:
|
||||
# cmake/Modules/FindNuma.cmake
|
||||
env.set('NUMA_ROOT_DIR', self.spec['numactl'].prefix)
|
||||
env.set("NUMA_ROOT_DIR", self.spec["numactl"].prefix)
|
||||
|
||||
# cmake/Modules/FindNumPy.cmake
|
||||
enable_or_disable('numpy')
|
||||
enable_or_disable("numpy")
|
||||
# cmake/Modules/FindOpenMP.cmake
|
||||
enable_or_disable('openmp', newer=True)
|
||||
enable_or_disable('qnnpack')
|
||||
if self.spec.satisfies('@1.3:'):
|
||||
enable_or_disable('qnnpack', var='PYTORCH_QNNPACK')
|
||||
if self.spec.satisfies('@1.8:'):
|
||||
enable_or_disable('valgrind')
|
||||
if self.spec.satisfies('@1.5:'):
|
||||
enable_or_disable('xnnpack')
|
||||
enable_or_disable('mkldnn')
|
||||
enable_or_disable('distributed')
|
||||
enable_or_disable('mpi')
|
||||
enable_or_disable("openmp", newer=True)
|
||||
enable_or_disable("qnnpack")
|
||||
if self.spec.satisfies("@1.3:"):
|
||||
enable_or_disable("qnnpack", var="PYTORCH_QNNPACK")
|
||||
if self.spec.satisfies("@1.8:"):
|
||||
enable_or_disable("valgrind")
|
||||
if self.spec.satisfies("@1.5:"):
|
||||
enable_or_disable("xnnpack")
|
||||
enable_or_disable("mkldnn")
|
||||
enable_or_disable("distributed")
|
||||
enable_or_disable("mpi")
|
||||
# cmake/Modules/FindGloo.cmake
|
||||
enable_or_disable('gloo', newer=True)
|
||||
if self.spec.satisfies('@1.6:'):
|
||||
enable_or_disable('tensorpipe')
|
||||
enable_or_disable("gloo", newer=True)
|
||||
if self.spec.satisfies("@1.6:"):
|
||||
enable_or_disable("tensorpipe")
|
||||
|
||||
if '+onnx_ml' in self.spec:
|
||||
env.set('ONNX_ML', 'ON')
|
||||
if "+onnx_ml" in self.spec:
|
||||
env.set("ONNX_ML", "ON")
|
||||
else:
|
||||
env.set('ONNX_ML', 'OFF')
|
||||
env.set("ONNX_ML", "OFF")
|
||||
|
||||
if not self.spec.satisfies('@master'):
|
||||
env.set('PYTORCH_BUILD_VERSION', self.version)
|
||||
env.set('PYTORCH_BUILD_NUMBER', 0)
|
||||
if not self.spec.satisfies("@master"):
|
||||
env.set("PYTORCH_BUILD_VERSION", self.version)
|
||||
env.set("PYTORCH_BUILD_NUMBER", 0)
|
||||
|
||||
# BLAS to be used by Caffe2
|
||||
# Options defined in cmake/Dependencies.cmake and cmake/Modules/FindBLAS.cmake
|
||||
if self.spec['blas'].name == 'atlas':
|
||||
env.set('BLAS', 'ATLAS')
|
||||
env.set('WITH_BLAS', 'atlas')
|
||||
elif self.spec['blas'].name in ['blis', 'amdblis']:
|
||||
env.set('BLAS', 'BLIS')
|
||||
env.set('WITH_BLAS', 'blis')
|
||||
elif self.spec['blas'].name == 'eigen':
|
||||
env.set('BLAS', 'Eigen')
|
||||
elif self.spec['lapack'].name in ['libflame', 'amdlibflame']:
|
||||
env.set('BLAS', 'FLAME')
|
||||
env.set('WITH_BLAS', 'FLAME')
|
||||
elif self.spec['blas'].name in [
|
||||
'intel-mkl', 'intel-parallel-studio', 'intel-oneapi-mkl']:
|
||||
env.set('BLAS', 'MKL')
|
||||
env.set('WITH_BLAS', 'mkl')
|
||||
elif self.spec['blas'].name == 'openblas':
|
||||
env.set('BLAS', 'OpenBLAS')
|
||||
env.set('WITH_BLAS', 'open')
|
||||
elif self.spec['blas'].name == 'veclibfort':
|
||||
env.set('BLAS', 'vecLib')
|
||||
env.set('WITH_BLAS', 'veclib')
|
||||
if self.spec["blas"].name == "atlas":
|
||||
env.set("BLAS", "ATLAS")
|
||||
env.set("WITH_BLAS", "atlas")
|
||||
elif self.spec["blas"].name in ["blis", "amdblis"]:
|
||||
env.set("BLAS", "BLIS")
|
||||
env.set("WITH_BLAS", "blis")
|
||||
elif self.spec["blas"].name == "eigen":
|
||||
env.set("BLAS", "Eigen")
|
||||
elif self.spec["lapack"].name in ["libflame", "amdlibflame"]:
|
||||
env.set("BLAS", "FLAME")
|
||||
env.set("WITH_BLAS", "FLAME")
|
||||
elif self.spec["blas"].name in ["intel-mkl", "intel-parallel-studio", "intel-oneapi-mkl"]:
|
||||
env.set("BLAS", "MKL")
|
||||
env.set("WITH_BLAS", "mkl")
|
||||
elif self.spec["blas"].name == "openblas":
|
||||
env.set("BLAS", "OpenBLAS")
|
||||
env.set("WITH_BLAS", "open")
|
||||
elif self.spec["blas"].name == "veclibfort":
|
||||
env.set("BLAS", "vecLib")
|
||||
env.set("WITH_BLAS", "veclib")
|
||||
else:
|
||||
env.set('BLAS', 'Generic')
|
||||
env.set('WITH_BLAS', 'generic')
|
||||
env.set("BLAS", "Generic")
|
||||
env.set("WITH_BLAS", "generic")
|
||||
|
||||
# Don't use vendored third-party libraries when possible
|
||||
env.set('BUILD_CUSTOM_PROTOBUF', 'OFF')
|
||||
env.set('USE_SYSTEM_NCCL', 'ON')
|
||||
env.set('USE_SYSTEM_EIGEN_INSTALL', 'ON')
|
||||
if self.spec.satisfies('@0.4:'):
|
||||
env.set('pybind11_DIR', self.spec['py-pybind11'].prefix)
|
||||
env.set('pybind11_INCLUDE_DIR',
|
||||
self.spec['py-pybind11'].prefix.include)
|
||||
if self.spec.satisfies('@1.10:'):
|
||||
env.set('USE_SYSTEM_PYBIND11', 'ON')
|
||||
env.set("BUILD_CUSTOM_PROTOBUF", "OFF")
|
||||
env.set("USE_SYSTEM_NCCL", "ON")
|
||||
env.set("USE_SYSTEM_EIGEN_INSTALL", "ON")
|
||||
if self.spec.satisfies("@0.4:"):
|
||||
env.set("pybind11_DIR", self.spec["py-pybind11"].prefix)
|
||||
env.set("pybind11_INCLUDE_DIR", self.spec["py-pybind11"].prefix.include)
|
||||
if self.spec.satisfies("@1.10:"):
|
||||
env.set("USE_SYSTEM_PYBIND11", "ON")
|
||||
# https://github.com/pytorch/pytorch/issues/60334
|
||||
# if self.spec.satisfies('@1.8:'):
|
||||
# env.set('USE_SYSTEM_SLEEF', 'ON')
|
||||
if self.spec.satisfies('@1.6:'):
|
||||
if self.spec.satisfies("@1.6:"):
|
||||
# env.set('USE_SYSTEM_LIBS', 'ON')
|
||||
# https://github.com/pytorch/pytorch/issues/60329
|
||||
# env.set('USE_SYSTEM_CPUINFO', 'ON')
|
||||
@@ -422,27 +447,26 @@ class PyTorch(PythonPackage, CudaPackage):
|
||||
# env.set('USE_SYSTEM_GLOO', 'ON')
|
||||
# https://github.com/Maratyszcza/FP16/issues/18
|
||||
# env.set('USE_SYSTEM_FP16', 'ON')
|
||||
env.set('USE_SYSTEM_PTHREADPOOL', 'ON')
|
||||
env.set('USE_SYSTEM_PSIMD', 'ON')
|
||||
env.set('USE_SYSTEM_FXDIV', 'ON')
|
||||
env.set('USE_SYSTEM_BENCHMARK', 'ON')
|
||||
env.set("USE_SYSTEM_PTHREADPOOL", "ON")
|
||||
env.set("USE_SYSTEM_PSIMD", "ON")
|
||||
env.set("USE_SYSTEM_FXDIV", "ON")
|
||||
env.set("USE_SYSTEM_BENCHMARK", "ON")
|
||||
# https://github.com/pytorch/pytorch/issues/60331
|
||||
# env.set('USE_SYSTEM_ONNX', 'ON')
|
||||
# https://github.com/pytorch/pytorch/issues/60332
|
||||
# env.set('USE_SYSTEM_XNNPACK', 'ON')
|
||||
|
||||
@run_before('install')
|
||||
@run_before("install")
|
||||
def build_amd(self):
|
||||
if '+rocm' in self.spec:
|
||||
python(os.path.join('tools', 'amd_build', 'build_amd.py'))
|
||||
if "+rocm" in self.spec:
|
||||
python(os.path.join("tools", "amd_build", "build_amd.py"))
|
||||
|
||||
@run_after('install')
|
||||
@run_after("install")
|
||||
@on_package_attributes(run_tests=True)
|
||||
def install_test(self):
|
||||
with working_dir('test'):
|
||||
python('run_test.py')
|
||||
with working_dir("test"):
|
||||
python("run_test.py")
|
||||
|
||||
# Tests need to be re-added since `phases` was overridden
|
||||
run_after('install')(
|
||||
PythonPackage._run_default_install_time_test_callbacks)
|
||||
run_after('install')(PythonPackage.sanity_check_prefix)
|
||||
run_after("install")(PythonPackage._run_default_install_time_test_callbacks)
|
||||
run_after("install")(PythonPackage.sanity_check_prefix)
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user