Compare commits: `features/e...` → `develop-20...` (440 commits)
**.github/dependabot.yml** (5 changed lines)

```diff
@@ -5,3 +5,8 @@ updates:
     directory: "/"
     schedule:
       interval: "daily"
+  # Requirements to build documentation
+  - package-ecosystem: "pip"
+    directory: "/lib/spack/docs"
+    schedule:
+      interval: "daily"
```
**.github/workflows/audit.yaml** (2 changed lines)

```diff
@@ -19,7 +19,7 @@ jobs:
   package-audits:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
      with:
        python-version: ${{inputs.python_version}}
```
**.github/workflows/bootstrap.yml** (22 changed lines)

```diff
@@ -24,7 +24,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison bison-devel libstdc++-static
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           cmake bison
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
           bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
           make patch unzip xz-utils python3 python3-dev tree
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
      with:
        fetch-depth: 0
    - name: Setup repo
@@ -158,7 +158,7 @@ jobs:
       run: |
         brew install cmake bison@2.7 tree
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
    - name: Bootstrap clingo
      run: |
        source share/spack/setup-env.sh
@@ -179,7 +179,7 @@ jobs:
       run: |
         brew install tree
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
    - name: Bootstrap clingo
      run: |
        set -ex
@@ -204,7 +204,7 @@ jobs:
     runs-on: ubuntu-20.04
     steps:
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
      with:
        fetch-depth: 0
    - name: Setup repo
@@ -247,7 +247,7 @@ jobs:
           bzip2 curl file g++ gcc patchelf gfortran git gzip \
           make patch unzip xz-utils python3 python3-dev tree
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           gawk
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
         # Remove GnuPG since we want to bootstrap it
         sudo rm -rf /usr/local/bin/gpg
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
    - name: Bootstrap GnuPG
      run: |
        source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
         # Remove GnuPG since we want to bootstrap it
         sudo rm -rf /usr/local/bin/gpg
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
    - name: Bootstrap GnuPG
      run: |
        source share/spack/setup-env.sh
```
**.github/workflows/build-containers.yml** (14 changed lines)

```diff
@@ -49,14 +49,14 @@ jobs:
           [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
           [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
           [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
-          [rockylinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'rockylinux:9'],
+          [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
          [fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
          [fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38']]
     name: Build ${{ matrix.dockerfile[0] }}
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2

      - name: Set Container Tag Normal (Nightly)
        run: |
@@ -92,13 +92,13 @@ jobs:
           path: dockerfiles

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1
+        uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # @v1

      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@4b4e9c3e2d4531116a6f8ba8e71fc6e2cb6e6c8c # @v1
+        uses: docker/setup-buildx-action@16c0bc4a6e6ada2cfd8afd41d22d95379cf7c32a # @v1

      - name: Log in to GitHub Container Registry
-        uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
+        uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
@@ -106,13 +106,13 @@ jobs:

       - name: Log in to DockerHub
         if: github.event_name != 'pull_request'
-        uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
+        uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671 # @v2
+        uses: docker/build-push-action@2eb1c1961a95fc15694676618e422e8ba1d63825 # @v2
        with:
          context: dockerfiles/${{ matrix.dockerfile[0] }}
          platforms: ${{ matrix.dockerfile[1] }}
```
**.github/workflows/ci.yaml** (2 changed lines)

```diff
@@ -35,7 +35,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
      if: ${{ github.event_name == 'push' }}
      with:
        fetch-depth: 0
```
**.github/workflows/nightly-win-builds.yml** (2 changed lines)

```diff
@@ -14,7 +14,7 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
      with:
        fetch-depth: 0
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
```
**.github/workflows/unit_tests.yaml** (10 changed lines)

```diff
@@ -47,7 +47,7 @@ jobs:
         on_develop: false

     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
@@ -94,7 +94,7 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
@@ -133,7 +133,7 @@ jobs:
         dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
@@ -152,7 +152,7 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
@@ -186,7 +186,7 @@ jobs:
       matrix:
         python-version: ["3.10"]
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
```
**.github/workflows/valid-style.yml** (7 changed lines)

```diff
@@ -18,7 +18,7 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
      with:
        python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
@@ -68,7 +68,7 @@ jobs:
         dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
@@ -81,6 +81,7 @@ jobs:
       shell: runuser -u spack-test -- bash {0}
       run: |
         source share/spack/setup-env.sh
         spack debug report
+        spack -d bootstrap now --dev
        spack style -t black
        spack unit-test -V
```
**.github/workflows/windows_python.yml** (6 changed lines)

```diff
@@ -15,7 +15,7 @@ jobs:
   unit-tests:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
      with:
        fetch-depth: 0
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
@@ -39,7 +39,7 @@ jobs:
   unit-tests-cmd:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
      with:
        fetch-depth: 0
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
@@ -63,7 +63,7 @@ jobs:
   build-abseil:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
      with:
        fetch-depth: 0
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
```
```diff
@@ -1,10 +1,16 @@
 version: 2

+build:
+  os: "ubuntu-22.04"
+  apt_packages:
+  - graphviz
+  tools:
+    python: "3.11"
+
 sphinx:
   configuration: lib/spack/docs/conf.py
   fail_on_warning: true

 python:
-  version: 3.7
   install:
   - requirements: lib/spack/docs/requirements.txt
```
```diff
@@ -214,7 +214,7 @@ goto :end_switch
 if defined _sp_args (
   if NOT "%_sp_args%"=="%_sp_args:--help=%" (
     goto :default_case
-  ) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
+  ) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
    goto :default_case
  ) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
    goto :default_case
```
**bin/spack.ps1** (new file, 132 lines)

```powershell
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
# #######################################################################

function Compare-CommonArgs {
    $CMDArgs = $args[0]
    # These arguments take precedence and call for no further parsing of arguments
    # invoke actual Spack entrypoint with that context and exit after
    "--help", "-h", "--version", "-V" | ForEach-Object {
        $arg_opt = $_
        if(($CMDArgs) -and ([bool]($CMDArgs.Where({$_ -eq $arg_opt})))) {
            return $true
        }
    }
    return $false
}

function Read-SpackArgs {
    $SpackCMD_params = @()
    $SpackSubCommand = $NULL
    $SpackSubCommandArgs = @()
    $args_ = $args[0]
    $args_ | ForEach-Object {
        if (!$SpackSubCommand) {
            if($_.SubString(0,1) -eq "-")
            {
                $SpackCMD_params += $_
            }
            else{
                $SpackSubCommand = $_
            }
        }
        else{
            $SpackSubCommandArgs += $_
        }
    }
    return $SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs
}

function Invoke-SpackCD {
    if (Compare-CommonArgs $SpackSubCommandArgs) {
        python $Env:SPACK_ROOT/bin/spack cd -h
    }
    else {
        $LOC = $(python $Env:SPACK_ROOT/bin/spack location $SpackSubCommandArgs)
        if (($NULL -ne $LOC)){
            if ( Test-Path -Path $LOC){
                Set-Location $LOC
            }
            else{
                exit 1
            }
        }
        else {
            exit 1
        }
    }
}

function Invoke-SpackEnv {
    if (Compare-CommonArgs $SpackSubCommandArgs[0]) {
        python $Env:SPACK_ROOT/bin/spack env -h
    }
    else {
        $SubCommandSubCommand = $SpackSubCommandArgs[0]
        $SubCommandSubCommandArgs = $SpackSubCommandArgs[1..$SpackSubCommandArgs.Count]
        switch ($SubCommandSubCommand) {
            "activate" {
                if (Compare-CommonArgs $SubCommandSubCommandArgs) {
                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
                }
                elseif ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
                }
                elseif (!$SubCommandSubCommandArgs) {
                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
                }
                else {
                    $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
                    $ExecutionContext.InvokeCommand($SpackEnv)
                }
            }
            "deactivate" {
                if ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
                    python $Env:SPACK_ROOT/bin/spack env deactivate $SubCommandSubCommandArgs
                }
                elseif($SubCommandSubCommandArgs) {
                    python $Env:SPACK_ROOT/bin/spack env deactivate -h
                }
                else {
                    $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env deactivate --pwsh)
                    $ExecutionContext.InvokeCommand($SpackEnv)
                }
            }
            default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
        }
    }
}

function Invoke-SpackLoad {
    if (Compare-CommonArgs $SpackSubCommandArgs) {
        python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
    }
    elseif ([bool]($SpackSubCommandArgs.Where({($_ -eq "--pwsh") -or ($_ -eq "--list")}))) {
        python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
    }
    else {
        $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
        $ExecutionContext.InvokeCommand($SpackEnv)
    }
}


$SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs = Read-SpackArgs $args

if (Compare-CommonArgs $SpackCMD_params) {
    python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
    exit $LASTEXITCODE
}

# Process Spack commands with special conditions
# all other commands are piped directly to Spack
switch($SpackSubCommand)
{
    "cd"     {Invoke-SpackCD}
    "env"    {Invoke-SpackEnv}
    "load"   {Invoke-SpackLoad}
    "unload" {Invoke-SpackLoad}
    default  {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
}
```
**lib/spack/docs/_pygments/style.py** (new file, 16 lines)

```python
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

# The name of the Pygments (syntax highlighting) style to use.
# We use our own extension of the default style with a few modifications
from pygments.styles.default import DefaultStyle
from pygments.token import Generic


class SpackStyle(DefaultStyle):
    styles = DefaultStyle.styles.copy()
    background_color = "#f4f4f8"
    styles[Generic.Output] = "#355"
    styles[Generic.Prompt] = "bold #346ec9"
```
```diff
@@ -149,7 +149,6 @@ def setup(sphinx):
 # Get nice vector graphics
 graphviz_output_format = "svg"

-
 # Add any paths that contain templates here, relative to this directory.
 templates_path = ["_templates"]

@@ -233,30 +232,8 @@ def setup(sphinx):
 # If true, sectionauthor and moduleauthor directives will be shown in the
 # output. They are ignored by default.
 # show_authors = False

-# The name of the Pygments (syntax highlighting) style to use.
-# We use our own extension of the default style with a few modifications
-from pygments.style import Style
-from pygments.styles.default import DefaultStyle
-from pygments.token import Comment, Generic, Text
-
-
-class SpackStyle(DefaultStyle):
-    styles = DefaultStyle.styles.copy()
-    background_color = "#f4f4f8"
-    styles[Generic.Output] = "#355"
-    styles[Generic.Prompt] = "bold #346ec9"
-
-
-import pkg_resources
-
-dist = pkg_resources.Distribution(__file__)
-sys.path.append(".")  # make 'conf' module findable
-ep = pkg_resources.EntryPoint.parse("spack = conf:SpackStyle", dist=dist)
-dist._ep_map = {"pygments.styles": {"plugin1": ep}}
-pkg_resources.working_set.add(dist)
-
-pygments_style = "spack"
+sys.path.append("./_pygments")
+pygments_style = "style.SpackStyle"

 # A list of ignored prefixes for module index sorting.
 # modindex_common_prefix = []
@@ -341,16 +318,15 @@ class SpackStyle(DefaultStyle):
 # Output file base name for HTML help builder.
 htmlhelp_basename = "Spackdoc"


 # -- Options for LaTeX output --------------------------------------------------

 latex_elements = {
     # The paper size ('letterpaper' or 'a4paper').
-    #'papersize': 'letterpaper',
+    # 'papersize': 'letterpaper',
     # The font size ('10pt', '11pt' or '12pt').
-    #'pointsize': '10pt',
+    # 'pointsize': '10pt',
     # Additional stuff for the LaTeX preamble.
-    #'preamble': '',
+    # 'preamble': '',
 }

 # Grouping the document tree into LaTeX files. List of tuples
```
```diff
@@ -636,7 +636,7 @@ to customize the generation of container recipes:
      - No
    * - ``os_packages:command``
      - Tool used to manage system packages
-     - ``apt``, ``yum``, ``zypper``, ``apk``, ``yum_amazon``
+     - ``apt``, ``yum``, ``dnf``, ``dnf_epel``, ``zypper``, ``apk``, ``yum_amazon``
      - Only with custom base images
    * - ``os_packages:update``
      - Whether or not to update the list of available packages
```
```diff
@@ -916,9 +916,9 @@ function, as shown in the example below:
 .. code-block:: yaml

    projections:
-      zlib: {name}-{version}
-      ^mpi: {name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}
-      all: {name}-{version}/{compiler.name}-{compiler.version}
+      zlib: "{name}-{version}"
+      ^mpi: "{name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}"
+      all: "{name}-{version}/{compiler.name}-{compiler.version}"

 The entries in the projections configuration file must all be either
 specs or the keyword ``all``. For each spec, the projection used will
```

```diff
@@ -1132,11 +1132,11 @@ index once every package is pushed. Note how this target uses the generated
 example/push/%: example/install/%
 	@mkdir -p $(dir $@)
 	$(info About to push $(SPEC) to a buildcache)
-	$(SPACK) -e . buildcache create --allow-root --only=package --directory $(BUILDCACHE_DIR) /$(HASH)
+	$(SPACK) -e . buildcache push --allow-root --only=package $(BUILDCACHE_DIR) /$(HASH)
 	@touch $@

 push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
 	$(info Updating the buildcache index)
-	$(SPACK) -e . buildcache update-index --directory $(BUILDCACHE_DIR)
+	$(SPACK) -e . buildcache update-index $(BUILDCACHE_DIR)
 	$(info Done!)
 	@touch $@
```
```diff
@@ -76,6 +76,7 @@ or refer to the full manual below.
    chain
    extensions
    pipelines
+   signing

 .. toctree::
    :maxdepth: 2
```
```diff
@@ -3071,7 +3071,7 @@ follows:
         # The library provided by the bar virtual package
         @property
         def bar_libs(self):
-            return find_libraries("libFooBar", root=sef.home, recursive=True)
+            return find_libraries("libFooBar", root=self.home, recursive=True)

         # The baz virtual package home
         @property
```
```diff
@@ -1,13 +1,8 @@
-# These dependencies should be installed using pip in order
-# to build the documentation.
-
-sphinx>=3.4,!=4.1.2,!=5.1.0
-sphinxcontrib-programoutput
-sphinx-design
-sphinx-rtd-theme
-python-levenshtein
-# Restrict to docutils <0.17 to workaround a list rendering issue in sphinx.
-# https://stackoverflow.com/questions/67542699
-docutils <0.17
-pygments <2.13
-urllib3 <2
+sphinx==6.2.1
+sphinxcontrib-programoutput==0.17
+sphinx_design==0.4.1
+sphinx-rtd-theme==1.2.2
+python-levenshtein==0.21.1
+docutils==0.18.1
+pygments==2.15.1
+urllib3==2.0.3
```
**lib/spack/docs/signing.rst** (new file, 484 lines)

.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _signing:

=====================
Spack Package Signing
=====================

The goal of package signing in Spack is to provide data integrity
assurances around official packages produced by the automated Spack CI
pipelines. These assurances directly address the security of Spack's
software supply chain by explaining why a security-conscious user can
be reasonably justified in the belief that packages installed via Spack
have an uninterrupted, auditable trail back to change management
decisions judged to be appropriate by the Spack maintainers. This is
achieved through cryptographic signing of packages built by Spack CI
pipelines based on code that has been transparently reviewed and
approved on GitHub. This document describes the signing process for
interested users.

.. _risks:

------------------------------
Risks, Impact and Threat Model
------------------------------

This document addresses the approach taken to safeguard Spack's
reputation with regard to the integrity of the package data produced by
Spack's CI pipelines. It does not address issues of data confidentiality
(Spack is intended to be largely open source) or availability (efforts
are described elsewhere). That said, the main reputational risk can
be broadly categorized as a loss of faith in the data integrity due to a
breach of the private key used to sign packages. Remediation of a
private key breach would require republishing the public key with a
revocation certificate, generating a new signing key, an assessment and
potential rebuild/resigning of all packages since the key was breached,
and finally direct intervention by every Spack user to update their copy
of Spack's public keys used for local verification.

The primary threat model used in mitigating the risks of these stated
impacts is one of individual error, not malicious intent or insider
threat. The primary objective is to avoid the above impacts by making a
private key breach nearly impossible due to oversight or configuration
error. Obvious and straightforward measures are taken to mitigate issues
of malicious interference in data integrity and insider threats, but
these attack vectors are not systematically addressed. It should be hard
to exfiltrate the private key intentionally, and almost impossible to
leak the key by accident.

.. _overview:

-----------------
Pipeline Overview
-----------------

Spack pipelines build software through progressive stages, where packages
in later stages nominally depend on packages built in earlier stages.
For both technical and design reasons these dependencies are not
implemented through the default GitLab artifacts mechanism; instead,
built packages are uploaded to AWS S3 mirrors (buckets) where they are
retrieved by subsequent stages in the pipeline. Two broad categories of
pipelines exist: Pull Request (PR) pipelines and Develop/Release
pipelines.

- PR pipelines are launched in response to pull requests made by
  trusted and untrusted users. Packages built on these pipelines upload
  code to quarantined AWS S3 locations which cache the built packages
  for the purposes of review and iteration on the changes proposed in
  the pull request. Packages built on PR pipelines can come from
  untrusted users, so signing of these pipelines is not implemented.
  Jobs in these pipelines are executed via normal GitLab runners both
  within the AWS GitLab infrastructure and at affiliated institutions.
- Develop and Release pipelines **sign** the packages they produce and carry
  strong integrity assurances that trace back to auditable change management
  decisions. These pipelines only run after members from a trusted group of
  reviewers verify that the proposed changes in a pull request are appropriate.
  Once the PR is merged, or a release is cut, a pipeline is run on protected
  GitLab runners which provide access to the required signing keys within the
  job. Intermediary keys are used to sign packages in each stage of the
  pipeline as they are built, and a final job officially signs each package
  external to any specific package's build environment. An intermediate key
  exists in the AWS infrastructure and for each affiliated institution that
  maintains protected runners. The runners that execute these pipelines
  exclusively accept jobs from protected branches, meaning the intermediate keys
  are never exposed to unreviewed code and the official keys are never exposed
  to any specific build environment.

.. _key_architecture:

----------------
Key Architecture
----------------

Spack's CI process uses public-key infrastructure (PKI) based on GNU Privacy
Guard (gpg) keypairs to sign public releases of Spack package metadata, also
called specs. Two classes of GPG keys are involved in the process to reduce the
impact of an individual private key compromise; these key classes are the
*Intermediate CI Key* and the *Reputational Key*. Each of these keys has signing
sub-keys that are used exclusively for signing packages. This can be confusing,
so for the purpose of this explanation we'll refer to Root and Signing keys.
Each key has a private and a public component, as well as one or more identities
and zero or more signatures.

-------------------
Intermediate CI Key
-------------------

The Intermediate key class is used to sign and verify packages between stages
within a develop or release pipeline. An intermediate key exists for the AWS
infrastructure as well as for each affiliated institution that maintains
protected runners. These intermediate keys are made available to the GitLab
execution environment building the package so that the package's dependencies
may be verified by the Signing Intermediate CI Public Key and the final package
may be signed by the Signing Intermediate CI Private Key.

+-------------------------------------------------------------------------------------------------+
| **Intermediate CI Key (GPG)**                                                                   |
+================================================+================================================+
| Root Intermediate CI Private Key (RSA 4096)#   | Root Intermediate CI Public Key (RSA 4096)     |
+------------------------------------------------+------------------------------------------------+
| Signing Intermediate CI Private Key (RSA 4096) | Signing Intermediate CI Public Key (RSA 4096)  |
+------------------------------------------------+------------------------------------------------+
| Identity: "Intermediate CI Key <maintainers@spack.io>"                                          |
+-------------------------------------------------------------------------------------------------+
| Signatures: None                                                                                |
+-------------------------------------------------------------------------------------------------+

The *Root Intermediate CI Private Key* is stripped out of the GPG key and
stored offline, completely separate from Spack's infrastructure. This allows the
core development team to append revocation certificates to the GPG key and
issue new sub-keys for use in the pipeline. It is our expectation that this
will happen on a semi-regular basis. A corollary of this is that *this key
should not be used to verify package integrity outside the internal CI process.*
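
For readers less familiar with GnuPG sub-key management, the sketch below shows
what issuing a fresh signing sub-key from an offline root key could look like.
The file names and the ``<root-key-fingerprint>`` placeholder are hypothetical;
this illustrates the general GnuPG workflow, not Spack's exact procedure::

  $ # Work in a throwaway keyring so the root key never touches the default one
  $ export GNUPGHOME=$(mktemp -d)
  $ gpg --import root-intermediate-ci-private.asc
  $ # Issue a new RSA 4096 signing sub-key, valid for one year
  $ gpg --quick-add-key <root-key-fingerprint> rsa4096 sign 1y
  $ # Export only the secret sub-keys for use by the pipeline
  $ gpg --armor --export-secret-subkeys <root-key-fingerprint> > signing-subkey.asc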

----------------
Reputational Key
----------------

The Reputational Key is the public-facing key used to sign complete groups of
development and release packages. Only one key pair exists in this class of
keys. In contrast to the Intermediate CI Key, the Reputational Key *should* be
used to verify package integrity. At the end of a develop or release pipeline, a
final pipeline job pulls down all signed package metadata built by the pipeline,
verifies it was signed with an Intermediate CI Key, then strips the
Intermediate CI Key signature from the package and re-signs it with the
Signing Reputational Private Key. The officially signed packages are then
uploaded back to the AWS S3 mirror. Please note that separating use of the
Reputational Key into this final job is done to prevent leakage of the key in a
Spack package. Because the Signing Reputational Private Key is never exposed to
a build job, it cannot accidentally end up in any built package.

+-------------------------------------------------------------------------------------------------+
| **Reputational Key (GPG)**                                                                      |
+================================================+================================================+
| Root Reputational Private Key (RSA 4096)#      | Root Reputational Public Key (RSA 4096)        |
+------------------------------------------------+------------------------------------------------+
| Signing Reputational Private Key (RSA 4096)    | Signing Reputational Public Key (RSA 4096)     |
+------------------------------------------------+------------------------------------------------+
| Identity: "Spack Project <maintainers@spack.io>"                                                |
+-------------------------------------------------------------------------------------------------+
| Signatures: Signed by core development team [#f1]_                                              |
+-------------------------------------------------------------------------------------------------+

The Root Reputational Private Key is stripped out of the GPG key and stored
offline, completely separate from Spack's infrastructure. This allows the core
development team to append revocation certificates to the GPG key in the
unlikely event that the Signing Reputational Private Key is compromised. In
general it is the expectation that rotating this key will happen infrequently,
if at all. This should allow relatively transparent verification for the
end-user community without needing deep familiarity with GnuPG or Public Key
Infrastructure.

.. _build_cache_format:

------------------
Build Cache Format
------------------

A binary package consists of a metadata file unambiguously defining the
built package (and including other details such as how to relocate it)
and the installation directory of the package stored as a compressed
archive file. The metadata files can either be unsigned, in which case
the contents are simply the json-serialized concrete spec plus metadata,
or they can be signed, in which case the json-serialized concrete spec
plus metadata is wrapped in a gpg cleartext signature. Built package
metadata files are named to indicate the operating system and
architecture for which the package was built, as well as the compiler
used to build it and the package's name and version. For example::

  linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig

would contain the concrete spec and binary metadata for a binary package
of ``zlib@1.2.12``, built for the ``ubuntu`` operating system and ``haswell``
architecture. The id of the built package exists in the name of the file
as well (after the package name and version) and in this case begins
with ``llv2ys``. The id distinguishes a particular built package from all
other built packages with the same os/arch, compiler, name, and version.
Below is an example of a signed binary package metadata file. Such a
file would live in the ``build_cache`` directory of a binary mirror::

  -----BEGIN PGP SIGNED MESSAGE-----
  Hash: SHA512

  {
  "spec": {
      <concrete-spec-contents-omitted>
  },

  "buildcache_layout_version": 1,
  "binary_cache_checksum": {
      "hash_algorithm": "sha256",
      "hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423"
  },

  "buildinfo": {
      "relative_prefix":
      "linux-ubuntu18.04-haswell/gcc-7.5.0/zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow",
      "relative_rpaths": false
  }
  }

  -----BEGIN PGP SIGNATURE-----
  iQGzBAEBCgAdFiEETZn0sLle8jIrdAPLx/P+voVcifMFAmKAGvwACgkQx/P+voVc
  ifNoVgv/VrhA+wurVs5GB9PhmMA1m5U/AfXZb4BElDRwpT8ZcTPIv5X8xtv60eyn
  4EOneGVbZoMThVxgev/NKARorGmhFXRqhWf+jknJZ1dicpqn/qpv34rELKUpgXU+
  QDQ4d1P64AIdTczXe2GI9ZvhOo6+bPvK7LIsTkBbtWmopkomVxF0LcMuxAVIbA6b
  887yBvVO0VGlqRnkDW7nXx49r3AG2+wDcoU1f8ep8QtjOcMNaPTPJ0UnjD0VQGW6
  4ZFaGZWzdo45MY6tF3o5mqM7zJkVobpoW3iUz6J5tjz7H/nMlGgMkUwY9Kxp2PVH
  qoj6Zip3LWplnl2OZyAY+vflPFdFh12Xpk4FG7Sxm/ux0r+l8tCAPvtw+G38a5P7
  QEk2JBr8qMGKASmnRlJUkm1vwz0a95IF3S9YDfTAA2vz6HH3PtsNLFhtorfx8eBi
  Wn5aPJAGEPOawEOvXGGbsH4cDEKPeN0n6cy1k92uPEmBLDVsdnur8q42jk5c2Qyx
  j3DXty57
  =3gvm
  -----END PGP SIGNATURE-----

If a user has trusted the public key associated with the private key
used to sign the above spec file, the signature can be verified with
gpg, as follows::

  $ gpg --verify linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig

The metadata (regardless of whether signed or unsigned) contains the checksum
of the ``.spack`` file containing the actual installation. The checksum should
be compared to a checksum computed locally on the ``.spack`` file to ensure the
contents have not changed since the binary spec plus metadata were signed. The
``.spack`` files are actually tarballs containing the compressed archive of the
install tree. These files, along with the metadata files, live within the
``build_cache`` directory of the mirror, and together are organized as follows::

  build_cache/
    # unsigned metadata (for indexing, contains sha256 of .spack file)
    <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json
    # clearsigned metadata (same as above, but signed)
    <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json.sig
    <arch>/
      <compiler>/
        <name>-<ver>/
          # tar.gz-compressed prefix (may support more compression formats later)
          <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spack

Uncompressing and extracting the ``.spack`` file results in the install tree.
This is in contrast to previous versions of Spack, where the ``.spack`` file
contained a (duplicated) metadata file, a signature file and a nested tarball
containing the install tree.
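
As an illustration, the local checksum comparison described above might look
like the following. The file names reuse the ``zlib`` example from earlier in
this section, and the commands are a sketch rather than a prescribed
procedure::

  $ # Hash recorded in the (signature-verified) metadata file
  $ grep '"hash"' linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json
      "hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423"
  $ # Hash of the archive actually downloaded; the two values must match
  $ sha256sum linux-ubuntu18.04-haswell/gcc-7.5.0/zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spack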

.. _internal_implementation:

-----------------------
Internal Implementation
-----------------------

The technical implementation of the pipeline signing process includes components
defined in Amazon Web Services, the Kubernetes cluster, at affiliated
institutions, and the GitLab/GitLab Runner deployment. We present the technical
implementation in two interdependent sections. The first addresses how secrets
are managed through the lifecycle of a develop or release pipeline. The second
section describes how GitLab Runner and pipelines are configured and managed to
support secure automated signing.

Secrets Management
^^^^^^^^^^^^^^^^^^

As stated above, the Root Private Keys (intermediate and reputational)
are stripped from the GPG keys and stored outside Spack's
infrastructure.

.. warning::
   **TODO**
   - Explanation here about where and how access is handled for these keys.
   - Both Root private keys are protected with strong passwords
   - Who has access to these and how?

**Intermediate CI Key**
-----------------------

Multiple intermediate CI signing keys exist: one Intermediate CI Key for jobs
run in AWS, and one key for each affiliated institution (e.g. University of
Oregon). Here we describe how the Intermediate CI Key is managed in AWS:

The Intermediate CI Key (including the Signing Intermediate CI Private Key) is
exported as an ASCII-armored file and stored in a Kubernetes secret called
``spack-intermediate-ci-signing-key``. For convenience's sake, this same secret
contains an ASCII-armored export of just the *public* components of the
Reputational Key. This secret also contains the *public* components of each of
the affiliated institutions' Intermediate CI Keys. These are potentially needed
to verify dependent packages which may have been found in the public mirror or
built by a protected job running on an affiliated institution's infrastructure
in an earlier stage of the pipeline.

Procedurally, the ``spack-intermediate-ci-signing-key`` secret is used in
the following way:

1. A ``large-arm-prot`` or ``large-x86-prot`` protected runner picks up
   a job tagged ``protected`` from a protected GitLab branch. (See
   `Protected Runners and Reserved Tags <#_8bawjmgykv0b>`__.)
2. Based on its configuration, the runner creates a job Pod in the
   pipeline namespace and mounts the ``spack-intermediate-ci-signing-key``
   Kubernetes secret into the build container.
3. The Intermediate CI Key, the affiliated institutions' public keys and the
   Reputational Public Key are imported into a keyring by the ``spack gpg …``
   sub-command. This is initiated by the job's build script, which is created by
   the generate job at the beginning of the pipeline.
4. Assuming the package has dependencies, those specs are verified using
   the keyring.
5. The package is built and the spec.json is generated.
6. The spec.json is signed by the keyring and uploaded to the mirror's
   build cache.
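
Concretely, steps 3-6 reduce to a handful of commands in the generated build
script. The sketch below is illustrative only: the mount path is hypothetical,
and the ``buildcache`` invocations mirror the Makefile example shown earlier in
this changeset rather than the pipeline's literal script::

  $ # Step 3: trust the mounted key material (hypothetical mount path)
  $ spack gpg trust /mnt/keys/intermediate-ci-signing-key.asc
  $ spack gpg list
  $ # Steps 4-5: install, verifying signed dependencies pulled from the mirror
  $ spack -e . install /$HASH
  $ # Step 6: sign the spec.json and upload it to the mirror's build cache
  $ spack -e . buildcache push --only=package $BUILDCACHE_DIR /$HASH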

**Reputational Key**
--------------------

Because of the increased impact to end users in the case of a private
key breach, the Reputational Key is managed separately from the
Intermediate CI Keys and has additional controls. First, the Reputational
Key was generated outside of Spack's infrastructure and has been signed
by the core development team. The Reputational Key (along with the
Signing Reputational Private Key) was then ASCII-armor exported to a
file. Unlike the Intermediate CI Key, this exported file is not stored as
a base64-encoded secret in Kubernetes. Instead, *the key file
itself* is encrypted and stored in Kubernetes as the
``spack-signing-key-encrypted`` secret in the pipeline namespace.

The encryption of the exported Reputational Key (including the Signing
Reputational Private Key) is handled by `AWS Key Management Store (KMS) data
keys
<https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#data-keys>`__.
The private key material is decrypted and imported at the time of signing into a
memory-mounted temporary directory holding the keychain. The signing job uses
the `AWS Encryption SDK
<https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/crypto-cli.html>`__
(i.e. ``aws-encryption-cli``) to decrypt the Reputational Key. Permission to
decrypt the key is granted to the job Pod through a Kubernetes service account
specifically used for this, and only this, function. Finally, for convenience's
sake, this same secret contains an ASCII-armored export of the *public*
components of the Intermediate CI Keys and the Reputational Key. This allows the
signing script to verify that packages were built by the pipeline (either on AWS
or at affiliated institutions), or signed previously as a part of a different
pipeline. This is done *before* decrypting and importing the Signing
Reputational Private Key material and officially signing the packages.

Procedurally, the ``spack-signing-key-encrypted`` secret is used in the
following way:

1. The ``spack-package-signing-gitlab-runner`` protected runner picks
   up a job tagged ``notary`` from a protected GitLab branch. (See
   `Protected Runners and Reserved Tags <#_8bawjmgykv0b>`__.)
2. Based on its configuration, the runner creates a job Pod in the
   pipeline namespace. The job runs in a stripped-down, purpose-built
   Docker image, ``ghcr.io/spack/notary:latest``. The runner is
   configured to only allow running jobs with this image.
3. The runner also mounts the ``spack-signing-key-encrypted`` secret to
   a path on disk. Note that this becomes several files on disk: the
   public components of the Intermediate CI Keys, the public components
   of the Reputational Key, and an AWS KMS-encrypted file containing the
   Signing Reputational Private Key.
4. In addition to the secret, the runner creates a tmpfs memory-mounted
   directory where the GnuPG keyring will be created to verify, and
   then re-sign, the package specs.
5. The job script syncs all spec.json.sig files from the build cache to
   a working directory in the job's execution environment.
6. The job script then runs the ``sign.sh`` script built into the
   notary Docker image.
7. The ``sign.sh`` script imports the public components of the
   Reputational and Intermediate CI Keys and uses them to verify good
   signatures on the spec.json.sig files. If any signed spec does not
   verify, the job immediately fails.
8. Assuming all specs are verified, the ``sign.sh`` script then unpacks
   the spec json data from the signed file in preparation for being
   re-signed with the Reputational Key.
9. The private components of the Reputational Key are decrypted to
   standard out using ``aws-encryption-cli`` directly into a ``gpg
   --import …`` statement which imports the key into the
   keyring mounted in-memory.
10. The private key is then used to sign each of the json specs and the
    keyring is removed from disk.
11. The re-signed json specs are resynced to the AWS S3 mirror and the
    public signing of the packages for the develop or release pipeline
    that created them is complete.
||||
that created them is complete.
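
The verify-then-re-sign core of ``sign.sh`` (steps 7, 8, and 10) might look
roughly like this. It is a sketch only: the working-directory layout and the
assumption that the ``spec.json.sig`` files are clearsigned come from the
description above, not from the actual notary image.

.. code-block:: sh

   # $KEYRING_DIR is the tmpfs keyring from step 4 (hypothetical name).
   for sig in work/*.spec.json.sig; do
       # Step 7: any bad signature fails the whole job.
       gpg --homedir "$KEYRING_DIR" --verify "$sig" || exit 1
       # Step 8: unpack the clearsigned json payload.
       gpg --homedir "$KEYRING_DIR" --output "${sig%.sig}" --decrypt "$sig"
   done

   for spec in work/*.spec.json; do
       # Step 10: re-sign with the Reputational Key imported in step 9.
       gpg --homedir "$KEYRING_DIR" --clearsign "$spec"
   done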

Non-service-account access to the private components of the Reputational
Key is managed through access to the symmetric key in KMS used
to encrypt the data key (which in turn is used to encrypt the GnuPG key;
see the `Encryption SDK
Documentation <https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/crypto-cli-examples.html#cli-example-encrypt-file>`__).
A small, trusted subset of the core development team are the only
individuals with access to this symmetric key.
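
The corresponding encrypt side, performed once and offline by one of those
trusted individuals, follows the linked example; the file names and the key
ARN are again placeholders.

.. code-block:: sh

   # One-time, offline encryption of the exported key (illustrative only).
   aws-encryption-cli --encrypt \
       --input reputational_key.asc \
       --wrapping-keys key="$KMS_KEY_ARN" \
       --suppress-metadata \
       --output signing_key.encrypted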

.. _protected_runners:

Protected Runners and Reserved Tags
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack has a large number of GitLab runners operating in its build farm.
These include runners deployed in the AWS Kubernetes cluster as well as
runners deployed at affiliated institutions. The majority of runners are
shared runners that operate across projects in gitlab.spack.io. These
runners pick up jobs primarily from the spack/spack project and execute
them in PR pipelines.

A small number of runners operating on AWS and at affiliated institutions are
registered as specific *protected* runners on the spack/spack project. In
addition to protected runners there are protected branches on the spack/spack
project. These are the ``develop`` branch, any release branch (i.e. managed with
the ``releases/v*`` wildcard) and any tag branch (managed with the ``v*``
wildcard). Finally, Spack's pipeline generation code reserves certain tags to make
sure jobs are routed to the correct runners; these tags are ``public``,
``protected``, and ``notary``. Understanding how all this works together to
protect secrets and provide integrity assurances can be a little confusing, so
let's break these down:

- **Protected Branches** -- Protected branches in Spack prevent anyone
  other than Maintainers in GitLab from pushing code. In the case of
  Spack, the only Maintainer-level entity pushing code to protected
  branches is Spack bot. Protecting branches also marks them in such a
  way that Protected Runners will only run jobs from those branches.
- **Protected Runners** -- Protected Runners only run jobs from protected
  branches. Because protected runners have access to secrets, it's critical
  that they not run jobs from untrusted code (i.e. PR branches). If they did, it
  would be possible for a PR branch to tag a job in such a way that a protected
  runner executed that job and mounted secrets into a code execution
  environment that had not been reviewed by Spack maintainers. Note, however,
  that in the absence of tagging used to route jobs, public runners *could* run
  jobs from protected branches. No secrets would be at risk of being breached,
  because non-protected runners do not have access to those secrets; lack of
  secrets would, however, cause the jobs to fail.
- **Reserved Tags** -- To mitigate the issue of public runners picking up
  protected jobs, Spack uses a small set of "reserved" job tags (note that these
  are *job* tags, not git tags). These tags are ``public``, ``protected``, and
  ``notary``. The majority of jobs executed in Spack's GitLab instance are
  executed via a ``generate`` job. The generate job code systematically ensures
  that no user-defined configuration sets these tags. Instead, the ``generate``
  job sets these tags based on rules related to the branch where the pipeline
  originated (see the sketch after this list). If the job is part of a pipeline
  on a PR branch, it sets the ``public`` tag. If the job is part of a pipeline
  on a protected branch, it sets the ``protected`` tag. Finally, if the job is
  the package signing job and it is running in a pipeline on a protected branch,
  it sets the ``notary`` tag.
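
The routing rules above reduce to a small mapping from pipeline type to
reserved tag. The real implementation is in Spack's Python pipeline-generation
code (visible in the ``ci.py`` diff later in this comparison); the shell
rendering below is illustrative only, and ``SPACK_PIPELINE_TYPE`` is assumed
here to carry the pipeline type.

.. code-block:: sh

   # Illustrative rendering of the reserved-tag routing rules.
   case "$SPACK_PIPELINE_TYPE" in
       spack_pull_request)     tag=public ;;
       spack_protected_branch) tag=protected ;;  # the signing job gets "notary"
       *)                      tag= ;;
   esac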

Protected Runners are configured to only run jobs from protected branches. Only
jobs running in pipelines on protected branches are tagged with the ``protected`` or
``notary`` tags. This tightly couples jobs on protected branches to protected
runners that provide access to the secrets required to sign the built packages.
The secrets can **only** be accessed by:

1. Runners under direct control of the core development team.
2. Runners under direct control of trusted maintainers at affiliated institutions.
3. Code running in the automated pipeline that has been reviewed by the
   Spack maintainers and judged to be appropriate.

Other attempts (whether through malicious intent or incompetence) can at
worst grab jobs intended for protected runners, which will cause those
jobs to fail, alerting both Spack maintainers and the core development
team.

.. [#f1]
   The Reputational Key has also cross-signed core development team
   keys.
lib/spack/env/cc
@@ -416,30 +416,14 @@ input_command="$*"
# The lists are all bell-separated to be as flexible as possible, as their
# contents may come from the command line, from ' '-separated lists,
# ':'-separated lists, etc.
include_dirs_list=""
lib_dirs_list=""
rpath_dirs_list=""
system_include_dirs_list=""
system_lib_dirs_list=""
system_rpath_dirs_list=""
isystem_system_include_dirs_list=""
isystem_include_dirs_list=""
libs_list=""
other_args_list=""

# Global state for keeping track of -Wl,-rpath -Wl,/path
wl_expect_rpath=no

# Same, but for -Xlinker -rpath -Xlinker /path
xlinker_expect_rpath=no

parse_Wl() {
while [ $# -ne 0 ]; do
if [ "$wl_expect_rpath" = yes ]; then
if system_dir "$1"; then
append system_rpath_dirs_list "$1"
append return_system_rpath_dirs_list "$1"
else
append rpath_dirs_list "$1"
append return_rpath_dirs_list "$1"
fi
wl_expect_rpath=no
else

@@ -449,9 +433,9 @@ parse_Wl() {
if [ -z "$arg" ]; then
shift; continue
elif system_dir "$arg"; then
append system_rpath_dirs_list "$arg"
append return_system_rpath_dirs_list "$arg"
else
append rpath_dirs_list "$arg"
append return_rpath_dirs_list "$arg"
fi
;;
--rpath=*)

@@ -459,9 +443,9 @@ parse_Wl() {
if [ -z "$arg" ]; then
shift; continue
elif system_dir "$arg"; then
append system_rpath_dirs_list "$arg"
append return_system_rpath_dirs_list "$arg"
else
append rpath_dirs_list "$arg"
append return_rpath_dirs_list "$arg"
fi
;;
-rpath|--rpath)

@@ -475,7 +459,7 @@ parse_Wl() {
return 1
;;
*)
append other_args_list "-Wl,$1"
append return_other_args_list "-Wl,$1"
;;
esac
fi

@@ -483,177 +467,210 @@ parse_Wl() {
done
}

categorize_arguments() {

while [ $# -ne 0 ]; do
unset IFS

# an RPATH to be added after the case statement.
rp=""
return_other_args_list=""
return_isystem_was_used=""
return_isystem_system_include_dirs_list=""
return_isystem_include_dirs_list=""
return_system_include_dirs_list=""
return_include_dirs_list=""
return_system_lib_dirs_list=""
return_lib_dirs_list=""
return_system_rpath_dirs_list=""
return_rpath_dirs_list=""

# Multiple consecutive spaces in the command line can
# result in blank arguments
if [ -z "$1" ]; then
shift
continue
fi
# Global state for keeping track of -Wl,-rpath -Wl,/path
wl_expect_rpath=no

if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
# NOTE: the eval is required to allow `|` alternatives inside the variable
eval "\
case \"\$1\" in
$SPACK_COMPILER_FLAGS_KEEP)
append other_args_list \"\$1\"
# Same, but for -Xlinker -rpath -Xlinker /path
xlinker_expect_rpath=no

while [ $# -ne 0 ]; do

# an RPATH to be added after the case statement.
rp=""

# Multiple consecutive spaces in the command line can
# result in blank arguments
if [ -z "$1" ]; then
shift
continue
fi

if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
# NOTE: the eval is required to allow `|` alternatives inside the variable
eval "\
case \"\$1\" in
$SPACK_COMPILER_FLAGS_KEEP)
append return_other_args_list \"\$1\"
shift
continue
;;
esac
"
fi
# the replace list is a space-separated list of pipe-separated pairs,
# the first in each pair is the original prefix to be matched, the
# second is the replacement prefix
if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
before=${rep%|*}
after=${rep#*|}
eval "\
stripped=\"\${1##$before}\"
"
if [ "$stripped" = "$1" ] ; then
continue
fi

replaced="$after$stripped"

# it matched, remove it
shift
continue

if [ -z "$replaced" ] ; then
# completely removed, continue OUTER loop
continue 2
fi

# re-build argument list with replacement
set -- "$replaced" "$@"
done
fi

case "$1" in
-isystem*)
arg="${1#-isystem}"
return_isystem_was_used=true
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append return_isystem_system_include_dirs_list "$arg"
else
append return_isystem_include_dirs_list "$arg"
fi
;;
-I*)
arg="${1#-I}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append return_system_include_dirs_list "$arg"
else
append return_include_dirs_list "$arg"
fi
;;
-L*)
arg="${1#-L}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append return_system_lib_dirs_list "$arg"
else
append return_lib_dirs_list "$arg"
fi
;;
-l*)
# -loopopt=0 is generated erroneously in autoconf <= 2.69,
# and passed by ifx to the linker, which confuses it with a
# library. Filter it out.
# TODO: generalize filtering of args with an env var, so that
# TODO: we do not have to special case this here.
if { [ "$mode" = "ccld" ] || [ $mode = "ld" ]; } \
&& [ "$1" != "${1#-loopopt}" ]; then
shift
continue
fi
arg="${1#-l}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
append return_other_args_list "-l$arg"
;;
-Wl,*)
IFS=,
if ! parse_Wl ${1#-Wl,}; then
append return_other_args_list "$1"
fi
unset IFS
;;
-Xlinker)
shift
if [ $# -eq 0 ]; then
# -Xlinker without value: let the compiler error about it.
append return_other_args_list -Xlinker
xlinker_expect_rpath=no
break
elif [ "$xlinker_expect_rpath" = yes ]; then
# Register the path of -Xlinker -rpath <other args> -Xlinker <path>
if system_dir "$1"; then
append return_system_rpath_dirs_list "$1"
else
append return_rpath_dirs_list "$1"
fi
xlinker_expect_rpath=no
else
case "$1" in
-rpath=*)
arg="${1#-rpath=}"
if system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
fi
;;
--rpath=*)
arg="${1#--rpath=}"
if system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
fi
;;
-rpath|--rpath)
xlinker_expect_rpath=yes
;;
"$dtags_to_strip")
;;
*)
append return_other_args_list -Xlinker
append return_other_args_list "$1"
;;
esac
fi
;;
"$dtags_to_strip")
;;
*)
append return_other_args_list "$1"
;;
esac
"
fi
# the replace list is a space-separated list of pipe-separated pairs,
# the first in each pair is the original prefix to be matched, the
# second is the replacement prefix
if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
before=${rep%|*}
after=${rep#*|}
eval "\
stripped=\"\${1##$before}\"
"
if [ "$stripped" = "$1" ] ; then
continue
fi
shift
done

replaced="$after$stripped"

# it matched, remove it
shift

if [ -z "$replaced" ] ; then
# completely removed, continue OUTER loop
continue 2
fi

# re-build argument list with replacement
set -- "$replaced" "$@"
done
# We found `-Xlinker -rpath` but no matching value `-Xlinker /path`. Just append
# `-Xlinker -rpath` again and let the compiler or linker handle the error during arg
# parsing.
if [ "$xlinker_expect_rpath" = yes ]; then
append return_other_args_list -Xlinker
append return_other_args_list -rpath
fi

case "$1" in
-isystem*)
arg="${1#-isystem}"
isystem_was_used=true
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append isystem_system_include_dirs_list "$arg"
else
append isystem_include_dirs_list "$arg"
fi
;;
-I*)
arg="${1#-I}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append system_include_dirs_list "$arg"
else
append include_dirs_list "$arg"
fi
;;
-L*)
arg="${1#-L}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append system_lib_dirs_list "$arg"
else
append lib_dirs_list "$arg"
fi
;;
-l*)
# -loopopt=0 is generated erroneously in autoconf <= 2.69,
# and passed by ifx to the linker, which confuses it with a
# library. Filter it out.
# TODO: generalize filtering of args with an env var, so that
# TODO: we do not have to special case this here.
if { [ "$mode" = "ccld" ] || [ $mode = "ld" ]; } \
&& [ "$1" != "${1#-loopopt}" ]; then
shift
continue
fi
arg="${1#-l}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
append other_args_list "-l$arg"
;;
-Wl,*)
IFS=,
if ! parse_Wl ${1#-Wl,}; then
append other_args_list "$1"
fi
unset IFS
;;
-Xlinker)
shift
if [ $# -eq 0 ]; then
# -Xlinker without value: let the compiler error about it.
append other_args_list -Xlinker
xlinker_expect_rpath=no
break
elif [ "$xlinker_expect_rpath" = yes ]; then
# Register the path of -Xlinker -rpath <other args> -Xlinker <path>
if system_dir "$1"; then
append system_rpath_dirs_list "$1"
else
append rpath_dirs_list "$1"
fi
xlinker_expect_rpath=no
else
case "$1" in
-rpath=*)
arg="${1#-rpath=}"
if system_dir "$arg"; then
append system_rpath_dirs_list "$arg"
else
append rpath_dirs_list "$arg"
fi
;;
--rpath=*)
arg="${1#--rpath=}"
if system_dir "$arg"; then
append system_rpath_dirs_list "$arg"
else
append rpath_dirs_list "$arg"
fi
;;
-rpath|--rpath)
xlinker_expect_rpath=yes
;;
"$dtags_to_strip")
;;
*)
append other_args_list -Xlinker
append other_args_list "$1"
;;
esac
fi
;;
"$dtags_to_strip")
;;
*)
append other_args_list "$1"
;;
esac
shift
done
# Same, but for -Wl flags.
if [ "$wl_expect_rpath" = yes ]; then
append return_other_args_list -Wl,-rpath
fi
}

# We found `-Xlinker -rpath` but no matching value `-Xlinker /path`. Just append
# `-Xlinker -rpath` again and let the compiler or linker handle the error during arg
# parsing.
if [ "$xlinker_expect_rpath" = yes ]; then
append other_args_list -Xlinker
append other_args_list -rpath
fi

# Same, but for -Wl flags.
if [ "$wl_expect_rpath" = yes ]; then
append other_args_list -Wl,-rpath
fi
categorize_arguments "$@"
include_dirs_list="$return_include_dirs_list"
lib_dirs_list="$return_lib_dirs_list"
rpath_dirs_list="$return_rpath_dirs_list"
system_include_dirs_list="$return_system_include_dirs_list"
system_lib_dirs_list="$return_system_lib_dirs_list"
system_rpath_dirs_list="$return_system_rpath_dirs_list"
isystem_was_used="$return_isystem_was_used"
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
isystem_include_dirs_list="$return_isystem_include_dirs_list"
other_args_list="$return_other_args_list"

#
# Add flags from Spack's cppflags, cflags, cxxflags, fcflags, fflags, and

@@ -673,12 +690,14 @@ elif [ "$SPACK_ADD_DEBUG_FLAGS" = "custom" ]; then
extend flags_list SPACK_DEBUG_FLAGS
fi

spack_flags_list=""

# Fortran flags come before CPPFLAGS
case "$mode" in
cc|ccld)
case $lang_flags in
F)
extend flags_list SPACK_FFLAGS
extend spack_flags_list SPACK_FFLAGS
;;
esac
;;

@@ -687,7 +706,7 @@ esac
# C preprocessor flags come before any C/CXX flags
case "$mode" in
cpp|as|cc|ccld)
extend flags_list SPACK_CPPFLAGS
extend spack_flags_list SPACK_CPPFLAGS
;;
esac

@@ -697,10 +716,10 @@ case "$mode" in
cc|ccld)
case $lang_flags in
C)
extend flags_list SPACK_CFLAGS
extend spack_flags_list SPACK_CFLAGS
;;
CXX)
extend flags_list SPACK_CXXFLAGS
extend spack_flags_list SPACK_CXXFLAGS
;;
esac

@@ -712,10 +731,25 @@ esac
# Linker flags
case "$mode" in
ld|ccld)
extend flags_list SPACK_LDFLAGS
extend spack_flags_list SPACK_LDFLAGS
;;
esac

IFS="$lsep"
categorize_arguments $spack_flags_list
unset IFS
spack_flags_include_dirs_list="$return_include_dirs_list"
spack_flags_lib_dirs_list="$return_lib_dirs_list"
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
spack_flags_isystem_was_used="$return_isystem_was_used"
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
spack_flags_other_args_list="$return_other_args_list"


# On macOS insert headerpad_max_install_names linker flag
if [ "$mode" = ld ] || [ "$mode" = ccld ]; then
if [ "${SPACK_SHORT_SPEC#*darwin}" != "${SPACK_SHORT_SPEC}" ]; then

@@ -741,6 +775,8 @@ if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
extend lib_dirs_list SPACK_LINK_DIRS
fi

libs_list=""

# add RPATHs if we're in in any linking mode
case "$mode" in
ld|ccld)

@@ -769,12 +805,16 @@ args_list="$flags_list"

# Insert include directories just prior to any system include directories
# NOTE: adding ${lsep} to the prefix here turns every added element into two
extend args_list spack_flags_include_dirs_list "-I"
extend args_list include_dirs_list "-I"
extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
extend args_list isystem_include_dirs_list "-isystem${lsep}"

case "$mode" in
cpp|cc|as|ccld)
if [ "$isystem_was_used" = "true" ]; then
if [ "$spack_flags_isystem_was_used" = "true" ]; then
extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
elif [ "$isystem_was_used" = "true" ]; then
extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
else
extend args_list SPACK_INCLUDE_DIRS "-I"

@@ -782,11 +822,15 @@ case "$mode" in
;;
esac

extend args_list spack_flags_system_include_dirs_list -I
extend args_list system_include_dirs_list -I
extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
extend args_list isystem_system_include_dirs_list "-isystem${lsep}"

# Library search paths
extend args_list spack_flags_lib_dirs_list "-L"
extend args_list lib_dirs_list "-L"
extend args_list spack_flags_system_lib_dirs_list "-L"
extend args_list system_lib_dirs_list "-L"

# RPATHs arguments

@@ -795,20 +839,25 @@ case "$mode" in
if [ -n "$dtags_to_add" ] ; then
append args_list "$linker_arg$dtags_to_add"
fi
extend args_list spack_flags_rpath_dirs_list "$rpath"
extend args_list rpath_dirs_list "$rpath"
extend args_list spack_flags_system_rpath_dirs_list "$rpath"
extend args_list system_rpath_dirs_list "$rpath"
;;
ld)
if [ -n "$dtags_to_add" ] ; then
append args_list "$dtags_to_add"
fi
extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
extend args_list rpath_dirs_list "-rpath${lsep}"
extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
extend args_list system_rpath_dirs_list "-rpath${lsep}"
;;
esac

# Other arguments from the input command
extend args_list other_args_list
extend args_list spack_flags_other_args_list

# Inject SPACK_LDLIBS, if supplied
extend args_list libs_list "-l"

@@ -864,3 +913,4 @@ fi
# Execute the full command, preserving spaces with IFS set
# to the alarm bell separator.
IFS="$lsep"; exec $full_command_list
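
The bell-separated list idiom that the hunks above manipulate can be
demonstrated standalone. The ``append`` helper below is a simplified sketch of
the real helper defined elsewhere in ``cc``, not a copy of it.

.. code-block:: sh

   #!/bin/sh
   # Minimal sketch of the bell-separated list idiom used by cc.
   lsep=$(printf '\a')   # ASCII bell: unlikely to appear in real arguments

   # append LISTNAME ELEMENT -- simplified stand-in for cc's helper.
   append() {
       eval "_old=\"\${$1}\""
       if [ -z "$_old" ]; then
           eval "$1=\"\$2\""
       else
           eval "$1=\"\${_old}${lsep}\$2\""
       fi
   }

   args_list=""
   append args_list "-I/opt/include dir"   # element containing a space
   append args_list "-L/opt/lib"

   IFS="$lsep"          # split only on the bell when expanding unquoted
   set -- $args_list
   unset IFS
   printf '%s\n' "$@"   # two elements, space preserved in the first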
@@ -760,13 +760,12 @@ def hashes_to_prefixes(spec):
}


def get_buildinfo_dict(spec, rel=False):
def get_buildinfo_dict(spec):
"""Create metadata for a tarball"""
manifest = get_buildfile_manifest(spec)

return {
"sbang_install_path": spack.hooks.sbang.sbang_install_path(),
"relative_rpaths": rel,
"buildpath": spack.store.layout.root,
"spackprefix": spack.paths.prefix,
"relative_prefix": os.path.relpath(spec.prefix, spack.store.layout.root),

@@ -1209,9 +1208,6 @@ class PushOptions(NamedTuple):
#: Overwrite existing tarball/metadata files in buildcache
force: bool = False

#: Whether to use relative RPATHs
relative: bool = False

#: Allow absolute paths to package prefixes when creating a tarball
allow_root: bool = False

@@ -1281,41 +1277,17 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
raise NoOverwriteException(url_util.format(remote_specfile_path))

pkg_dir = os.path.basename(spec.prefix.rstrip(os.path.sep))
workdir = os.path.join(stage_dir, pkg_dir)

# TODO: We generally don't want to mutate any files, but when using relative
# mode, Spack unfortunately *does* mutate rpaths and links ahead of time.
# For now, we only make a full copy of the spec prefix when in relative mode.

if options.relative:
# tarfile is used because it preserves hardlink etc best.
binaries_dir = workdir
temp_tarfile_name = tarball_name(spec, ".tar")
temp_tarfile_path = os.path.join(tarfile_dir, temp_tarfile_name)
with closing(tarfile.open(temp_tarfile_path, "w")) as tar:
tar.add(name="%s" % spec.prefix, arcname=".")
with closing(tarfile.open(temp_tarfile_path, "r")) as tar:
tar.extractall(workdir)
os.remove(temp_tarfile_path)
else:
binaries_dir = spec.prefix
binaries_dir = spec.prefix

# create info for later relocation and create tar
buildinfo = get_buildinfo_dict(spec, options.relative)
buildinfo = get_buildinfo_dict(spec)

# optionally make the paths in the binaries relative to each other
# in the spack install tree before creating tarball
if options.relative:
make_package_relative(workdir, spec, buildinfo, options.allow_root)
elif not options.allow_root:
if not options.allow_root:
ensure_package_relocatable(buildinfo, binaries_dir)

_do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo)

# remove copy of install directory
if options.relative:
shutil.rmtree(workdir)

# get the sha256 checksum of the tarball
checksum = checksum_tarball(tarfile_path)

@@ -1336,7 +1308,6 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
# This will be used to determine is the directory layout has changed.
buildinfo = {}
buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.layout.root)
buildinfo["relative_rpaths"] = options.relative
spec_dict["buildinfo"] = buildinfo

with open(specfile_path, "w") as outfile:

@@ -1596,35 +1567,6 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
return None


def make_package_relative(workdir, spec, buildinfo, allow_root):
"""
Change paths in binaries to relative paths. Change absolute symlinks
to relative symlinks.
"""
prefix = spec.prefix
old_layout_root = buildinfo["buildpath"]
orig_path_names = list()
cur_path_names = list()
for filename in buildinfo["relocate_binaries"]:
orig_path_names.append(os.path.join(prefix, filename))
cur_path_names.append(os.path.join(workdir, filename))

platform = spack.platforms.by_name(spec.platform)
if "macho" in platform.binary_formats:
relocate.make_macho_binaries_relative(cur_path_names, orig_path_names, old_layout_root)

if "elf" in platform.binary_formats:
relocate.make_elf_binaries_relative(cur_path_names, orig_path_names, old_layout_root)

allow_root or relocate.ensure_binaries_are_relocatable(cur_path_names)
orig_path_names = list()
cur_path_names = list()
for linkname in buildinfo.get("relocate_links", []):
orig_path_names.append(os.path.join(prefix, linkname))
cur_path_names.append(os.path.join(workdir, linkname))
relocate.make_link_relative(cur_path_names, orig_path_names)


def ensure_package_relocatable(buildinfo, binaries_dir):
"""Check if package binaries are relocatable."""
binaries = [os.path.join(binaries_dir, f) for f in buildinfo["relocate_binaries"]]
@@ -175,12 +175,12 @@ def black_root_spec() -> str:

def flake8_root_spec() -> str:
"""Return the root spec used to bootstrap flake8"""
return _root_spec("py-flake8")
return _root_spec("py-flake8@3.8.2:")


def pytest_root_spec() -> str:
"""Return the root spec used to bootstrap flake8"""
return _root_spec("py-pytest")
return _root_spec("py-pytest@6.2.4:")


def ensure_environment_dependencies() -> None:
@@ -252,7 +252,7 @@ def initconfig_hardware_entries(self):
entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))

archs = spec.variants["cuda_arch"].value
if archs != "none":
if archs[0] != "none":
arch_str = ";".join(archs)
entries.append(
cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", "{0}".format(arch_str))

@@ -269,7 +269,7 @@ def initconfig_hardware_entries(self):
cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc))
)
archs = self.spec.variants["amdgpu_target"].value
if archs != "none":
if archs[0] != "none":
arch_str = ";".join(archs)
entries.append(
cmake_cache_string("CMAKE_HIP_ARCHITECTURES", "{0}".format(arch_str))

@@ -289,6 +289,7 @@ def std_initconfig_entries(self):
"# CMake executable path: {0}".format(self.pkg.spec["cmake"].command.path),
"#------------------{0}\n".format("-" * 60),
cmake_cache_path("CMAKE_PREFIX_PATH", cmake_prefix_path),
self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
]

def initconfig_package_entries(self):
@@ -5,6 +5,7 @@
import collections.abc
import inspect
import os
import pathlib
import platform
import re
import sys

@@ -15,7 +16,6 @@
import spack.build_environment
import spack.builder
import spack.package_base
import spack.util.path
from spack.directives import build_system, conflicts, depends_on, variant
from spack.multimethod import when

@@ -271,7 +271,7 @@ def std_args(pkg, generator=None):
args = [
"-G",
generator,
define("CMAKE_INSTALL_PREFIX", pkg.prefix),
define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
define("CMAKE_BUILD_TYPE", build_type),
define("BUILD_TESTING", pkg.run_tests),
]
@@ -102,11 +102,10 @@ def cuda_flags(arch_list):

depends_on("cuda@11.0:", when="cuda_arch=80")
depends_on("cuda@11.1:", when="cuda_arch=86")

depends_on("cuda@11.4:", when="cuda_arch=87")

depends_on("cuda@11.8:", when="cuda_arch=89")
depends_on("cuda@11.8:", when="cuda_arch=90")

depends_on("cuda@12.0:", when="cuda_arch=90")

# From the NVIDIA install guide we know of conflicts for particular
# platforms (linux, darwin), architectures (x86, powerpc) and compilers
@@ -121,7 +121,7 @@ def setup_run_environment(self, env):
$ source {prefix}/{component}/{version}/env/vars.sh
"""
# Only if environment modifications are desired (default is +envmods)
if "+envmods" in self.spec:
if "~envmods" not in self.spec:
env.extend(
EnvironmentModifications.from_sourcing_file(
join_path(self.component_prefix, "env", "vars.sh")
@@ -23,6 +23,7 @@
import spack.store
from spack.directives import build_system, depends_on, extends, maintainers
from spack.error import NoHeadersError, NoLibrariesError, SpecError
from spack.install_test import test_part
from spack.version import Version

from ._checks import BaseBuilder, execute_install_time_tests

@@ -167,18 +168,65 @@ def remove_files_from_view(self, view, merge_map):

view.remove_files(to_remove)

def test(self):
def test_imports(self):
"""Attempts to import modules of the installed package."""

# Make sure we are importing the installed modules,
# not the ones in the source directory
python = inspect.getmodule(self).python.path
for module in self.import_modules:
self.run_test(
inspect.getmodule(self).python.path,
["-c", "import {0}".format(module)],
purpose="checking import of {0}".format(module),
with test_part(
self,
f"test_imports_{module}",
purpose=f"checking import of {module}",
work_dir="spack-test",
)
):
python("-c", f"import {module}")

def update_external_dependencies(self, extendee_spec=None):
"""
Ensure all external python packages have a python dependency

If another package in the DAG depends on python, we use that
python for the dependency of the external. If not, we assume
that the external PythonPackage is installed into the same
directory as the python it depends on.
"""
# TODO: Include this in the solve, rather than instantiating post-concretization
if "python" not in self.spec:
if extendee_spec:
python = extendee_spec
elif "python" in self.spec.root:
python = self.spec.root["python"]
else:
python = self.get_external_python_for_prefix()
if not python.concrete:
repo = spack.repo.path.repo_for_pkg(python)
python.namespace = repo.namespace

# Ensure architecture information is present
if not python.architecture:
host_platform = spack.platforms.host()
host_os = host_platform.operating_system("default_os")
host_target = host_platform.target("default_target")
python.architecture = spack.spec.ArchSpec(
(str(host_platform), str(host_os), str(host_target))
)
else:
if not python.architecture.platform:
python.architecture.platform = spack.platforms.host()
if not python.architecture.os:
python.architecture.os = "default_os"
if not python.architecture.target:
python.architecture.target = archspec.cpu.host().family.name

# Ensure compiler information is present
if not python.compiler:
python.compiler = self.spec.compiler

python.external_path = self.spec.external_path
python._mark_concrete()
self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"), virtuals=())


class PythonPackage(PythonExtension):

@@ -225,51 +273,6 @@ def list_url(cls):
name = cls.pypi.split("/")[0]
return "https://pypi.org/simple/" + name + "/"

def update_external_dependencies(self, extendee_spec=None):
"""
Ensure all external python packages have a python dependency

If another package in the DAG depends on python, we use that
python for the dependency of the external. If not, we assume
that the external PythonPackage is installed into the same
directory as the python it depends on.
"""
# TODO: Include this in the solve, rather than instantiating post-concretization
if "python" not in self.spec:
if extendee_spec:
python = extendee_spec
elif "python" in self.spec.root:
python = self.spec.root["python"]
else:
python = self.get_external_python_for_prefix()
if not python.concrete:
repo = spack.repo.path.repo_for_pkg(python)
python.namespace = repo.namespace

# Ensure architecture information is present
if not python.architecture:
host_platform = spack.platforms.host()
host_os = host_platform.operating_system("default_os")
host_target = host_platform.target("default_target")
python.architecture = spack.spec.ArchSpec(
(str(host_platform), str(host_os), str(host_target))
)
else:
if not python.architecture.platform:
python.architecture.platform = spack.platforms.host()
if not python.architecture.os:
python.architecture.os = "default_os"
if not python.architecture.target:
python.architecture.target = archspec.cpu.host().family.name

# Ensure compiler information is present
if not python.compiler:
python.compiler = self.spec.compiler

python.external_path = self.spec.external_path
python._mark_concrete()
self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"))

def get_external_python_for_prefix(self):
"""
For an external package that extends python, find the most likely spec for the python
@@ -28,7 +28,6 @@
|
||||
|
||||
import spack
|
||||
import spack.binary_distribution as bindist
|
||||
import spack.compilers as compilers
|
||||
import spack.config as cfg
|
||||
import spack.environment as ev
|
||||
import spack.main
|
||||
@@ -70,17 +69,10 @@ def __exit__(self, exc_type, exc_value, exc_traceback):
|
||||
return False
|
||||
|
||||
|
||||
def _is_main_phase(phase_name):
|
||||
return True if phase_name == "specs" else False
|
||||
|
||||
|
||||
def get_job_name(phase, strip_compiler, spec, osarch, build_group):
|
||||
def get_job_name(spec, osarch, build_group):
|
||||
"""Given the necessary parts, format the gitlab job name
|
||||
|
||||
Arguments:
|
||||
phase (str): Either 'specs' for the main phase, or the name of a
|
||||
bootstrapping phase
|
||||
strip_compiler (bool): Should compiler be stripped from job name
|
||||
spec (spack.spec.Spec): Spec job will build
|
||||
osarch: Architecture TODO: (this is a spack.spec.ArchSpec,
|
||||
but sphinx doesn't recognize the type and fails).
|
||||
@@ -93,12 +85,7 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
|
||||
format_str = ""
|
||||
format_args = []
|
||||
|
||||
if phase:
|
||||
format_str += "({{{0}}})".format(item_idx)
|
||||
format_args.append(phase)
|
||||
item_idx += 1
|
||||
|
||||
format_str += " {{{0}}}".format(item_idx)
|
||||
format_str += "{{{0}}}".format(item_idx)
|
||||
format_args.append(spec.name)
|
||||
item_idx += 1
|
||||
|
||||
@@ -110,10 +97,9 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
|
||||
format_args.append(spec.version)
|
||||
item_idx += 1
|
||||
|
||||
if _is_main_phase(phase) is True or strip_compiler is False:
|
||||
format_str += " {{{0}}}".format(item_idx)
|
||||
format_args.append(spec.compiler)
|
||||
item_idx += 1
|
||||
format_str += " {{{0}}}".format(item_idx)
|
||||
format_args.append(spec.compiler)
|
||||
item_idx += 1
|
||||
|
||||
format_str += " {{{0}}}".format(item_idx)
|
||||
format_args.append(osarch)
|
||||
@@ -153,49 +139,33 @@ def _add_dependency(spec_label, dep_label, deps):
|
||||
deps[spec_label].add(dep_label)
|
||||
|
||||
|
||||
def _get_spec_dependencies(
|
||||
specs, deps, spec_labels, check_index_only=False, mirrors_to_check=None
|
||||
):
|
||||
spec_deps_obj = _compute_spec_deps(
|
||||
specs, check_index_only=check_index_only, mirrors_to_check=mirrors_to_check
|
||||
)
|
||||
def _get_spec_dependencies(specs, deps, spec_labels):
|
||||
spec_deps_obj = _compute_spec_deps(specs)
|
||||
|
||||
if spec_deps_obj:
|
||||
dependencies = spec_deps_obj["dependencies"]
|
||||
specs = spec_deps_obj["specs"]
|
||||
|
||||
for entry in specs:
|
||||
spec_labels[entry["label"]] = {
|
||||
"spec": entry["spec"],
|
||||
"needs_rebuild": entry["needs_rebuild"],
|
||||
}
|
||||
spec_labels[entry["label"]] = entry["spec"]
|
||||
|
||||
for entry in dependencies:
|
||||
_add_dependency(entry["spec"], entry["depends"], deps)
|
||||
|
||||
|
||||
def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
|
||||
def stage_spec_jobs(specs):
|
||||
"""Take a set of release specs and generate a list of "stages", where the
|
||||
jobs in any stage are dependent only on jobs in previous stages. This
|
||||
allows us to maximize build parallelism within the gitlab-ci framework.
|
||||
|
||||
Arguments:
|
||||
specs (Iterable): Specs to build
|
||||
check_index_only (bool): Regardless of whether DAG pruning is enabled,
|
||||
all configured mirrors are searched to see if binaries for specs
|
||||
are up to date on those mirrors. This flag limits that search to
|
||||
the binary cache indices on those mirrors to speed the process up,
|
||||
even though there is no garantee the index is up to date.
|
||||
mirrors_to_checK: Optional mapping giving mirrors to check instead of
|
||||
any configured mirrors.
|
||||
|
||||
Returns: A tuple of information objects describing the specs, dependencies
|
||||
and stages:
|
||||
|
||||
spec_labels: A dictionary mapping the spec labels which are made of
|
||||
(pkg-name/hash-prefix), to objects containing "spec" and "needs_rebuild"
|
||||
keys. The root spec is the spec of which this spec is a dependency
|
||||
and the spec is the formatted spec string for this spec.
|
||||
spec_labels: A dictionary mapping the spec labels (which are formatted
|
||||
as pkg-name/hash-prefix) to concrete specs.
|
||||
|
||||
deps: A dictionary where the keys should also have appeared as keys in
|
||||
the spec_labels dictionary, and the values are the set of
|
||||
@@ -224,13 +194,7 @@ def _remove_satisfied_deps(deps, satisfied_list):
|
||||
deps = {}
|
||||
spec_labels = {}
|
||||
|
||||
_get_spec_dependencies(
|
||||
specs,
|
||||
deps,
|
||||
spec_labels,
|
||||
check_index_only=check_index_only,
|
||||
mirrors_to_check=mirrors_to_check,
|
||||
)
|
||||
_get_spec_dependencies(specs, deps, spec_labels)
|
||||
|
||||
# Save the original deps, as we need to return them at the end of the
|
||||
# function. In the while loop below, the "dependencies" variable is
|
||||
@@ -256,24 +220,36 @@ def _remove_satisfied_deps(deps, satisfied_list):
|
||||
return spec_labels, deps, stages
|
||||
|
||||
|
||||
def _print_staging_summary(spec_labels, dependencies, stages):
|
||||
def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions):
|
||||
if not stages:
|
||||
return
|
||||
|
||||
tty.msg(" Staging summary ([x] means a job needs rebuilding):")
|
||||
mirrors = spack.mirror.MirrorCollection(mirrors=mirrors_to_check)
|
||||
tty.msg("Checked the following mirrors for binaries:")
|
||||
for m in mirrors.values():
|
||||
tty.msg(" {0}".format(m.fetch_url))
|
||||
|
||||
tty.msg("Staging summary ([x] means a job needs rebuilding):")
|
||||
for stage_index, stage in enumerate(stages):
|
||||
tty.msg(" stage {0} ({1} jobs):".format(stage_index, len(stage)))
|
||||
tty.msg(" stage {0} ({1} jobs):".format(stage_index, len(stage)))
|
||||
|
||||
for job in sorted(stage):
|
||||
s = spec_labels[job]["spec"]
|
||||
s = spec_labels[job]
|
||||
rebuild = rebuild_decisions[job].rebuild
|
||||
reason = rebuild_decisions[job].reason
|
||||
reason_msg = " ({0})".format(reason) if reason else ""
|
||||
tty.msg(
|
||||
" [{1}] {0} -> {2}".format(
|
||||
job, "x" if spec_labels[job]["needs_rebuild"] else " ", _get_spec_string(s)
|
||||
" [{1}] {0} -> {2}{3}".format(
|
||||
job, "x" if rebuild else " ", _get_spec_string(s), reason_msg
|
||||
)
|
||||
)
|
||||
if rebuild_decisions[job].mirrors:
|
||||
tty.msg(" found on the following mirrors:")
|
||||
for murl in rebuild_decisions[job].mirrors:
|
||||
tty.msg(" {0}".format(murl))
|
||||
|
||||
|
||||
def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None):
|
||||
def _compute_spec_deps(spec_list):
|
||||
"""
|
||||
Computes all the dependencies for the spec(s) and generates a JSON
|
||||
object which provides both a list of unique spec names as well as a
|
||||
@@ -337,12 +313,8 @@ def append_dep(s, d):
|
||||
tty.msg("Will not stage external pkg: {0}".format(s))
|
||||
continue
|
||||
|
||||
up_to_date_mirrors = bindist.get_mirrors_for_spec(
|
||||
spec=s, mirrors_to_check=mirrors_to_check, index_only=check_index_only
|
||||
)
|
||||
|
||||
skey = _spec_deps_key(s)
|
||||
spec_labels[skey] = {"spec": s, "needs_rebuild": not up_to_date_mirrors}
|
||||
spec_labels[skey] = s
|
||||
|
||||
for d in s.dependencies(deptype=all):
|
||||
dkey = _spec_deps_key(d)
|
||||
@@ -352,14 +324,8 @@ def append_dep(s, d):
|
||||
|
||||
append_dep(skey, dkey)
|
||||
|
||||
for spec_label, spec_holder in spec_labels.items():
|
||||
specs.append(
|
||||
{
|
||||
"label": spec_label,
|
||||
"spec": spec_holder["spec"],
|
||||
"needs_rebuild": spec_holder["needs_rebuild"],
|
||||
}
|
||||
)
|
||||
for spec_label, concrete_spec in spec_labels.items():
|
||||
specs.append({"label": spec_label, "spec": concrete_spec})
|
||||
|
||||
deps_json_obj = {"specs": specs, "dependencies": dependencies}
|
||||
|
||||
@@ -371,26 +337,17 @@ def _spec_matches(spec, match_string):
|
||||
|
||||
|
||||
def _format_job_needs(
|
||||
phase_name,
|
||||
strip_compilers,
|
||||
dep_jobs,
|
||||
osname,
|
||||
build_group,
|
||||
prune_dag,
|
||||
stage_spec_dict,
|
||||
enable_artifacts_buildcache,
|
||||
dep_jobs, osname, build_group, prune_dag, rebuild_decisions, enable_artifacts_buildcache
|
||||
):
|
||||
needs_list = []
|
||||
for dep_job in dep_jobs:
|
||||
dep_spec_key = _spec_deps_key(dep_job)
|
||||
dep_spec_info = stage_spec_dict[dep_spec_key]
|
||||
rebuild = rebuild_decisions[dep_spec_key].rebuild
|
||||
|
||||
if not prune_dag or dep_spec_info["needs_rebuild"]:
|
||||
if not prune_dag or rebuild:
|
||||
needs_list.append(
|
||||
{
|
||||
"job": get_job_name(
|
||||
phase_name, strip_compilers, dep_job, dep_job.architecture, build_group
|
||||
),
|
||||
"job": get_job_name(dep_job, dep_job.architecture, build_group),
|
||||
"artifacts": enable_artifacts_buildcache,
|
||||
}
|
||||
)
|
||||
@@ -490,17 +447,12 @@ def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
|
||||
return affected_specs
|
||||
|
||||
|
||||
def _build_jobs(phases, staged_phases):
|
||||
for phase in phases:
|
||||
phase_name = phase["name"]
|
||||
spec_labels, dependencies, stages = staged_phases[phase_name]
|
||||
|
||||
for stage_jobs in stages:
|
||||
for spec_label in stage_jobs:
|
||||
spec_record = spec_labels[spec_label]
|
||||
release_spec = spec_record["spec"]
|
||||
release_spec_dag_hash = release_spec.dag_hash()
|
||||
yield release_spec, release_spec_dag_hash
|
||||
def _build_jobs(spec_labels, stages):
|
||||
for stage_jobs in stages:
|
||||
for spec_label in stage_jobs:
|
||||
release_spec = spec_labels[spec_label]
|
||||
release_spec_dag_hash = release_spec.dag_hash()
|
||||
yield release_spec, release_spec_dag_hash
|
||||
|
||||
|
||||
def _noop(x):
|
||||
@@ -519,14 +471,21 @@ def _unpack_script(script_section, op=_noop):
|
||||
return script
|
||||
|
||||
|
||||
class RebuildDecision(object):
|
||||
def __init__(self):
|
||||
self.rebuild = True
|
||||
self.mirrors = []
|
||||
self.reason = ""
|
||||
|
||||
|
||||
class SpackCI:
|
||||
"""Spack CI object used to generate intermediate representation
|
||||
used by the CI generator(s).
|
||||
"""
|
||||
|
||||
def __init__(self, ci_config, phases, staged_phases):
|
||||
def __init__(self, ci_config, spec_labels, stages):
|
||||
"""Given the information from the ci section of the config
|
||||
and the job phases setup meta data needed for generating Spack
|
||||
and the staged jobs, set up meta data needed for generating Spack
|
||||
CI IR.
|
||||
"""
|
||||
|
||||
@@ -541,9 +500,6 @@ def __init__(self, ci_config, phases, staged_phases):
|
||||
"enable-artifacts-buildcache": self.ci_config.get(
|
||||
"enable-artifacts-buildcache", False
|
||||
),
|
||||
"bootstrap": self.ci_config.get(
|
||||
"bootstrap", []
|
||||
), # This is deprecated and should be removed
|
||||
"rebuild-index": self.ci_config.get("rebuild-index", True),
|
||||
"broken-specs-url": self.ci_config.get("broken-specs-url", None),
|
||||
"broken-tests-packages": self.ci_config.get("broken-tests-packages", []),
|
||||
@@ -551,7 +507,7 @@ def __init__(self, ci_config, phases, staged_phases):
|
||||
}
|
||||
jobs = self.ir["jobs"]
|
||||
|
||||
for spec, dag_hash in _build_jobs(phases, staged_phases):
|
||||
for spec, dag_hash in _build_jobs(spec_labels, stages):
|
||||
jobs[dag_hash] = self.__init_job(spec)
|
||||
|
||||
for name in self.named_jobs:
|
||||
@@ -751,7 +707,7 @@ def generate_gitlab_ci_yaml(
|
||||
env.concretize()
|
||||
env.write()
|
||||
|
||||
yaml_root = ev.config_dict(env.manifest)
|
||||
yaml_root = env.manifest[ev.TOP_LEVEL_KEY]
|
||||
|
||||
# Get the joined "ci" config with all of the current scopes resolved
|
||||
ci_config = cfg.get("ci")
|
||||
@@ -873,25 +829,6 @@ def generate_gitlab_ci_yaml(
|
||||
if "temporary-storage-url-prefix" in ci_config:
|
||||
temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
|
||||
|
||||
bootstrap_specs = []
|
||||
phases = []
|
||||
if "bootstrap" in ci_config:
|
||||
for phase in ci_config["bootstrap"]:
|
||||
try:
|
||||
phase_name = phase.get("name")
|
||||
strip_compilers = phase.get("compiler-agnostic")
|
||||
except AttributeError:
|
||||
phase_name = phase
|
||||
strip_compilers = False
|
||||
phases.append({"name": phase_name, "strip-compilers": strip_compilers})
|
||||
|
||||
for bs in env.spec_lists[phase_name]:
|
||||
bootstrap_specs.append(
|
||||
{"spec": bs, "phase-name": phase_name, "strip-compilers": strip_compilers}
|
||||
)
|
||||
|
||||
phases.append({"name": "specs", "strip-compilers": False})
|
||||
|
||||
# If a remote mirror override (alternate buildcache destination) was
|
||||
# specified, add it here in case it has already built hashes we might
|
||||
# generate.
|
||||
@@ -946,7 +883,7 @@ def generate_gitlab_ci_yaml(
|
||||
# Add config scopes to environment
|
||||
env_includes = env_yaml_root["spack"].get("include", [])
|
||||
cli_scopes = [
|
||||
os.path.abspath(s.path)
|
||||
os.path.relpath(s.path, concrete_env_dir)
|
||||
for s in cfg.scopes().values()
|
||||
if type(s) == cfg.ImmutableConfigScope
|
||||
and s.path not in env_includes
|
||||
@@ -993,39 +930,13 @@ def generate_gitlab_ci_yaml(
|
||||
except bindist.FetchCacheError as e:
|
||||
tty.warn(e)
|
||||
|
||||
staged_phases = {}
|
||||
try:
|
||||
for phase in phases:
|
||||
phase_name = phase["name"]
|
||||
if phase_name == "specs":
|
||||
# Anything in the "specs" of the environment are already
|
||||
# concretized by the block at the top of this method, so we
|
||||
# only need to find the concrete versions, and then avoid
|
||||
# re-concretizing them needlessly later on.
|
||||
concrete_phase_specs = [
|
||||
concrete
|
||||
for abstract, concrete in env.concretized_specs()
|
||||
if abstract in env.spec_lists[phase_name]
|
||||
]
|
||||
else:
|
||||
# Any specs lists in other definitions (but not in the
|
||||
# "specs") of the environment are not yet concretized so we
|
||||
# have to concretize them explicitly here.
|
||||
concrete_phase_specs = env.spec_lists[phase_name]
|
||||
with spack.concretize.disable_compiler_existence_check():
|
||||
for phase_spec in concrete_phase_specs:
|
||||
phase_spec.concretize()
|
||||
staged_phases[phase_name] = stage_spec_jobs(
|
||||
concrete_phase_specs,
|
||||
check_index_only=check_index_only,
|
||||
mirrors_to_check=mirrors_to_check,
|
||||
)
|
||||
finally:
|
||||
# Clean up remote mirror override if enabled
|
||||
if remote_mirror_override:
|
||||
spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
|
||||
if spack_pipeline_type == "spack_pull_request":
|
||||
spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())
|
||||
spec_labels, dependencies, stages = stage_spec_jobs(
|
||||
[
|
||||
concrete
|
||||
for abstract, concrete in env.concretized_specs()
|
||||
if abstract in env.spec_lists["specs"]
|
||||
]
|
||||
)
|
||||
|
||||
all_job_names = []
|
||||
output_object = {}
|
||||
@@ -1048,276 +959,212 @@ def generate_gitlab_ci_yaml(
|
||||
else:
|
||||
broken_spec_urls = web_util.list_url(broken_specs_url)
|
||||
|
||||
spack_ci = SpackCI(ci_config, phases, staged_phases)
|
||||
spack_ci = SpackCI(ci_config, spec_labels, stages)
|
||||
spack_ci_ir = spack_ci.generate_ir()
|
||||
|
||||
for phase in phases:
|
||||
phase_name = phase["name"]
|
||||
strip_compilers = phase["strip-compilers"]
|
||||
rebuild_decisions = {}
|
||||
|
||||
spec_labels, dependencies, stages = staged_phases[phase_name]
|
||||
for stage_jobs in stages:
|
||||
stage_name = "stage-{0}".format(stage_id)
|
||||
stage_names.append(stage_name)
|
||||
stage_id += 1
|
||||
|
||||
for stage_jobs in stages:
|
||||
stage_name = "stage-{0}".format(stage_id)
|
||||
stage_names.append(stage_name)
|
||||
stage_id += 1
|
||||
for spec_label in stage_jobs:
|
||||
release_spec = spec_labels[spec_label]
|
||||
release_spec_dag_hash = release_spec.dag_hash()
|
||||
|
||||
for spec_label in stage_jobs:
|
||||
spec_record = spec_labels[spec_label]
|
||||
release_spec = spec_record["spec"]
|
||||
release_spec_dag_hash = release_spec.dag_hash()
|
||||
spec_record = RebuildDecision()
|
||||
rebuild_decisions[spec_label] = spec_record
|
||||
|
||||
if prune_untouched_packages:
|
||||
if release_spec not in affected_specs:
|
||||
tty.debug(
|
||||
"Pruning {0}/{1}, untouched by change.".format(
|
||||
release_spec.name, release_spec.dag_hash()[:7]
|
||||
)
|
||||
)
|
||||
spec_record["needs_rebuild"] = False
|
||||
continue
|
||||
|
||||
job_object = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]
|
||||
|
||||
if not job_object:
|
||||
tty.warn("No match found for {0}, skipping it".format(release_spec))
|
||||
if prune_untouched_packages:
|
||||
if release_spec not in affected_specs:
|
||||
spec_record.rebuild = False
|
||||
spec_record.reason = "Pruned, untouched by change."
|
||||
continue
|
||||
|
||||
if spack_pipeline_type is not None:
|
||||
# For spack pipelines "public" and "protected" are reserved tags
|
||||
job_object["tags"] = _remove_reserved_tags(job_object.get("tags", []))
|
||||
if spack_pipeline_type == "spack_protected_branch":
|
||||
job_object["tags"].extend(["protected"])
|
||||
elif spack_pipeline_type == "spack_pull_request":
|
||||
job_object["tags"].extend(["public"])
|
||||
up_to_date_mirrors = bindist.get_mirrors_for_spec(
|
||||
spec=release_spec, mirrors_to_check=mirrors_to_check, index_only=check_index_only
|
||||
)
|
||||
|
||||
if "script" not in job_object:
|
||||
raise AttributeError
|
||||
spec_record.rebuild = not up_to_date_mirrors
|
||||
if up_to_date_mirrors:
|
||||
spec_record.reason = "Pruned, found in mirrors"
|
||||
spec_record.mirrors = [m["mirror_url"] for m in up_to_date_mirrors]
|
||||
else:
|
||||
spec_record.reason = "Scheduled, not found anywhere"
|
||||
|
||||
def main_script_replacements(cmd):
    return cmd.replace("{env_dir}", rel_concrete_env_dir)
job_object = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]

job_object["script"] = _unpack_script(
    job_object["script"], op=main_script_replacements
)
if not job_object:
    tty.warn("No match found for {0}, skipping it".format(release_spec))
    continue

if "before_script" in job_object:
    job_object["before_script"] = _unpack_script(job_object["before_script"])
if spack_pipeline_type is not None:
    # For spack pipelines "public" and "protected" are reserved tags
    job_object["tags"] = _remove_reserved_tags(job_object.get("tags", []))
    if spack_pipeline_type == "spack_protected_branch":
        job_object["tags"].extend(["protected"])
    elif spack_pipeline_type == "spack_pull_request":
        job_object["tags"].extend(["public"])
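The "public" and "protected" tags route jobs onto runners reserved for Spack's own pipelines, which is why user-supplied tags are scrubbed first. A plausible sketch of the helper, assuming it simply filters a module-level list of reserved names:

    SPACK_RESERVED_TAGS = ["public", "protected"]  # assumed constant

    def _remove_reserved_tags(tags):
        # Drop any tag a user is not allowed to set on generated jobs
        return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]
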
if "after_script" in job_object:
|
||||
job_object["after_script"] = _unpack_script(job_object["after_script"])
|
||||
if "script" not in job_object:
|
||||
raise AttributeError
|
||||
|
||||
osname = str(release_spec.architecture)
|
||||
job_name = get_job_name(
|
||||
phase_name, strip_compilers, release_spec, osname, build_group
|
||||
)
|
||||
def main_script_replacements(cmd):
|
||||
return cmd.replace("{env_dir}", rel_concrete_env_dir)
|
||||
|
||||
compiler_action = "NONE"
|
||||
if len(phases) > 1:
|
||||
compiler_action = "FIND_ANY"
|
||||
if _is_main_phase(phase_name):
|
||||
compiler_action = "INSTALL_MISSING"
|
||||
job_object["script"] = _unpack_script(
|
||||
job_object["script"], op=main_script_replacements
|
||||
)
|
||||
|
||||
job_vars = job_object.setdefault("variables", {})
|
||||
job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
|
||||
job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
|
||||
job_vars["SPACK_COMPILER_ACTION"] = compiler_action
|
||||
if "before_script" in job_object:
|
||||
job_object["before_script"] = _unpack_script(job_object["before_script"])
|
||||
|
||||
job_object["needs"] = []
|
||||
if spec_label in dependencies:
|
||||
if enable_artifacts_buildcache:
|
||||
# Get dependencies transitively, so they're all
|
||||
# available in the artifacts buildcache.
|
||||
dep_jobs = [d for d in release_spec.traverse(deptype=all, root=False)]
|
||||
else:
|
||||
# In this case, "needs" is only used for scheduling
|
||||
# purposes, so we only get the direct dependencies.
|
||||
dep_jobs = []
|
||||
for dep_label in dependencies[spec_label]:
|
||||
dep_jobs.append(spec_labels[dep_label]["spec"])
|
||||
if "after_script" in job_object:
|
||||
job_object["after_script"] = _unpack_script(job_object["after_script"])
|
||||
|
||||
job_object["needs"].extend(
|
||||
_format_job_needs(
|
||||
phase_name,
|
||||
strip_compilers,
|
||||
dep_jobs,
|
||||
osname,
|
||||
build_group,
|
||||
prune_dag,
|
||||
spec_labels,
|
||||
enable_artifacts_buildcache,
|
||||
)
|
||||
)
|
||||
osname = str(release_spec.architecture)
|
||||
job_name = get_job_name(release_spec, osname, build_group)
|
||||
|
||||
rebuild_spec = spec_record["needs_rebuild"]
|
||||
|
||||
# This next section helps gitlab make sure the right
# bootstrapped compiler exists in the artifacts buildcache by
# creating an artificial dependency between this spec and its
# compiler. So, if we are in the main phase, and if the
# compiler we are supposed to use is listed in any of the
# bootstrap spec lists, then we will add more dependencies to
# the job (that compiler and maybe its dependencies as well).
if _is_main_phase(phase_name):
    spec_arch_family = release_spec.architecture.target.microarchitecture.family
    compiler_pkg_spec = compilers.pkg_spec_for_compiler(release_spec.compiler)
    for bs in bootstrap_specs:
        c_spec = bs["spec"]
        bs_arch = c_spec.architecture
        bs_arch_family = bs_arch.target.microarchitecture.family
        if (
            c_spec.intersects(compiler_pkg_spec)
            and bs_arch_family == spec_arch_family
        ):
            # We found the bootstrap compiler this release spec
            # should be built with, so for DAG scheduling
            # purposes, we will at least add the compiler spec
            # to the job's "needs". But if artifact buildcache
            # is enabled, we'll have to add all transitive deps
            # of the compiler as well.

            # Here we check whether the bootstrapped compiler
            # needs to be rebuilt. Until compilers are proper
            # dependencies, we artificially force the spec to
            # be rebuilt if the compiler targeted to build it
            # needs to be rebuilt.
            bs_specs, _, _ = staged_phases[bs["phase-name"]]
            c_spec_key = _spec_deps_key(c_spec)
            rbld_comp = bs_specs[c_spec_key]["needs_rebuild"]
            rebuild_spec = rebuild_spec or rbld_comp
            # Also update record so dependents do not fail to
            # add this spec to their "needs"
            spec_record["needs_rebuild"] = rebuild_spec

dep_jobs = [c_spec]
if enable_artifacts_buildcache:
    dep_jobs = [d for d in c_spec.traverse(deptype=all)]

job_object["needs"].extend(
    _format_job_needs(
        bs["phase-name"],
        bs["strip-compilers"],
        dep_jobs,
        str(bs_arch),
        build_group,
        prune_dag,
        bs_specs,
        enable_artifacts_buildcache,
    )
)
else:
    debug_msg = "".join(
        [
            "Considered compiler {0} for spec ",
            "{1}, but rejected it either because it was ",
            "not the compiler required by the spec, or ",
            "because the target arch families of the ",
            "spec and the compiler did not match",
        ]
    ).format(c_spec, release_spec)
    tty.debug(debug_msg)

if prune_dag and not rebuild_spec and not copy_only_pipeline:
    tty.debug(
        "Pruning {0}/{1}, does not need rebuild.".format(
            release_spec.name, release_spec.dag_hash()
        )
    )
    continue

if broken_spec_urls is not None and release_spec_dag_hash in broken_spec_urls:
    known_broken_specs_encountered.append(release_spec_dag_hash)

# Only keep track of these if we are copying rebuilt cache entries
if spack_buildcache_copy:
    # TODO: This assumes signed version of the spec
    buildcache_copies[release_spec_dag_hash] = [
        {
            "src": url_util.join(
                buildcache_copy_src_prefix,
                bindist.build_cache_relative_path(),
                bindist.tarball_name(release_spec, ".spec.json.sig"),
            ),
            "dest": url_util.join(
                buildcache_copy_dest_prefix,
                bindist.build_cache_relative_path(),
                bindist.tarball_name(release_spec, ".spec.json.sig"),
            ),
        },
        {
            "src": url_util.join(
                buildcache_copy_src_prefix,
                bindist.build_cache_relative_path(),
                bindist.tarball_path_name(release_spec, ".spack"),
            ),
            "dest": url_util.join(
                buildcache_copy_dest_prefix,
                bindist.build_cache_relative_path(),
                bindist.tarball_path_name(release_spec, ".spack"),
            ),
        },
    ]

if artifacts_root:
    job_object["needs"].append(
        {"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
    )

job_vars["SPACK_SPEC_NEEDS_REBUILD"] = str(rebuild_spec)
|
||||
|
||||
if cdash_handler:
|
||||
cdash_handler.current_spec = release_spec
|
||||
build_name = cdash_handler.build_name
|
||||
all_job_names.append(build_name)
|
||||
job_vars["SPACK_CDASH_BUILD_NAME"] = build_name
|
||||
|
||||
build_stamp = cdash_handler.build_stamp
|
||||
job_vars["SPACK_CDASH_BUILD_STAMP"] = build_stamp
|
||||
|
||||
job_object["artifacts"] = spack.config.merge_yaml(
|
||||
job_object.get("artifacts", {}),
|
||||
{
|
||||
"when": "always",
|
||||
"paths": [
|
||||
rel_job_log_dir,
|
||||
rel_job_repro_dir,
|
||||
rel_job_test_dir,
|
||||
rel_user_artifacts_dir,
|
||||
],
|
||||
},
|
||||
)
|
||||
job_vars = job_object.setdefault("variables", {})
|
||||
job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
|
||||
job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
|
||||
|
||||
job_object["needs"] = []
|
||||
if spec_label in dependencies:
|
||||
if enable_artifacts_buildcache:
|
||||
bc_root = os.path.join(local_mirror_dir, "build_cache")
|
||||
job_object["artifacts"]["paths"].extend(
|
||||
[
|
||||
os.path.join(bc_root, p)
|
||||
for p in [
|
||||
bindist.tarball_name(release_spec, ".spec.json"),
|
||||
bindist.tarball_directory_name(release_spec),
|
||||
]
|
||||
]
|
||||
# Get dependencies transitively, so they're all
|
||||
# available in the artifacts buildcache.
|
||||
dep_jobs = [d for d in release_spec.traverse(deptype=all, root=False)]
|
||||
else:
|
||||
# In this case, "needs" is only used for scheduling
|
||||
# purposes, so we only get the direct dependencies.
|
||||
dep_jobs = []
|
||||
for dep_label in dependencies[spec_label]:
|
||||
dep_jobs.append(spec_labels[dep_label])
|
||||
|
||||
job_object["needs"].extend(
|
||||
_format_job_needs(
|
||||
dep_jobs,
|
||||
osname,
|
||||
build_group,
|
||||
prune_dag,
|
||||
rebuild_decisions,
|
||||
enable_artifacts_buildcache,
|
||||
)
|
||||
)
|
||||
|
||||
job_object["stage"] = stage_name
|
||||
job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
|
||||
job_object["interruptible"] = True
|
||||
rebuild_spec = spec_record.rebuild
|
||||
|
||||
length_needs = len(job_object["needs"])
|
||||
if length_needs > max_length_needs:
|
||||
max_length_needs = length_needs
|
||||
max_needs_job = job_name
|
||||
if not rebuild_spec and not copy_only_pipeline:
|
||||
if prune_dag:
|
||||
spec_record.reason = "Pruned, up-to-date"
|
||||
continue
|
||||
else:
|
||||
# DAG pruning is disabled, force the spec to rebuild. The
|
||||
# record still contains any mirrors on which the spec
|
||||
# may have been found, so we can print them in the staging
|
||||
# summary.
|
||||
spec_record.rebuild = True
|
||||
spec_record.reason = "Scheduled, DAG pruning disabled"
|
||||
|
||||
if not copy_only_pipeline:
|
||||
output_object[job_name] = job_object
|
||||
job_id += 1
|
||||
if broken_spec_urls is not None and release_spec_dag_hash in broken_spec_urls:
    known_broken_specs_encountered.append(release_spec_dag_hash)

# Only keep track of these if we are copying rebuilt cache entries
if spack_buildcache_copy:
    # TODO: This assumes signed version of the spec
    buildcache_copies[release_spec_dag_hash] = [
        {
            "src": url_util.join(
                buildcache_copy_src_prefix,
                bindist.build_cache_relative_path(),
                bindist.tarball_name(release_spec, ".spec.json.sig"),
            ),
            "dest": url_util.join(
                buildcache_copy_dest_prefix,
                bindist.build_cache_relative_path(),
                bindist.tarball_name(release_spec, ".spec.json.sig"),
            ),
        },
        {
            "src": url_util.join(
                buildcache_copy_src_prefix,
                bindist.build_cache_relative_path(),
                bindist.tarball_path_name(release_spec, ".spack"),
            ),
            "dest": url_util.join(
                buildcache_copy_dest_prefix,
                bindist.build_cache_relative_path(),
                bindist.tarball_path_name(release_spec, ".spack"),
            ),
        },
    ]

if artifacts_root:
    job_object["needs"].append(
        {"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
    )

# Let downstream jobs know whether the spec needed rebuilding, regardless
# of whether DAG pruning was enabled or not.
job_vars["SPACK_SPEC_NEEDS_REBUILD"] = str(rebuild_spec)

if cdash_handler:
    cdash_handler.current_spec = release_spec
    build_name = cdash_handler.build_name
    all_job_names.append(build_name)
    job_vars["SPACK_CDASH_BUILD_NAME"] = build_name

    build_stamp = cdash_handler.build_stamp
    job_vars["SPACK_CDASH_BUILD_STAMP"] = build_stamp

job_object["artifacts"] = spack.config.merge_yaml(
    job_object.get("artifacts", {}),
    {
        "when": "always",
        "paths": [
            rel_job_log_dir,
            rel_job_repro_dir,
            rel_job_test_dir,
            rel_user_artifacts_dir,
        ],
    },
)

if enable_artifacts_buildcache:
    bc_root = os.path.join(local_mirror_dir, "build_cache")
    job_object["artifacts"]["paths"].extend(
        [
            os.path.join(bc_root, p)
            for p in [
                bindist.tarball_name(release_spec, ".spec.json"),
                bindist.tarball_directory_name(release_spec),
            ]
        ]
    )

job_object["stage"] = stage_name
job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
job_object["interruptible"] = True

length_needs = len(job_object["needs"])
if length_needs > max_length_needs:
    max_length_needs = length_needs
    max_needs_job = job_name

if not copy_only_pipeline:
    output_object[job_name] = job_object
    job_id += 1

if print_summary:
    for phase in phases:
        phase_name = phase["name"]
        tty.msg('Stages for phase "{0}"'.format(phase_name))
        phase_stages = staged_phases[phase_name]
        _print_staging_summary(*phase_stages)
    _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions)

# Clean up remote mirror override if enabled
if remote_mirror_override:
    spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
if spack_pipeline_type == "spack_pull_request":
    spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())

tty.debug("{0} build jobs generated in {1} stages".format(job_id, stage_id))

@@ -1576,44 +1423,6 @@ def can_verify_binaries():
    return len(gpg_util.public_keys()) >= 1


def configure_compilers(compiler_action, scope=None):
    """Depending on the compiler_action parameter, either turn on the
    install_missing_compilers config option, or find spack compilers,
    or do nothing. This is used from rebuild jobs in bootstrapping
    pipelines, where in the bootstrapping phase we would pass
    FIND_ANY in case of compiler-agnostic bootstrapping, while in the
    spec building phase we would pass INSTALL_MISSING in order to get
    spack to use the compiler which was built in the previous phase and
    is now sitting in the binary mirror.

    Arguments:
        compiler_action (str): 'FIND_ANY', 'INSTALL_MISSING' have meanings
            described above. Any other value essentially results in a no-op.
        scope (spack.config.ConfigScope): Optional. The scope in which to look for
            compilers, in case 'FIND_ANY' was provided.
    """
    if compiler_action == "INSTALL_MISSING":
        tty.debug("Make sure bootstrapped compiler will be installed")
        config = cfg.get("config")
        config["install_missing_compilers"] = True
        cfg.set("config", config)
    elif compiler_action == "FIND_ANY":
        tty.debug("Just find any available compiler")
        find_args = ["find"]
        if scope:
            find_args.extend(["--scope", scope])
        output = spack_compiler(*find_args)
        tty.debug("spack compiler find")
        tty.debug(output)
        output = spack_compiler("list")
        tty.debug("spack compiler list")
        tty.debug(output)
    else:
        tty.debug("No compiler action to be taken")

    return None

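As the ci_rebuild hunk further down shows, the action reaches a rebuild job through the SPACK_COMPILER_ACTION variable that generate_gitlab_ci_yaml places in the job's environment, so the typical call site looks like:

    import os

    compiler_action = os.environ.get("SPACK_COMPILER_ACTION")  # "FIND_ANY", "INSTALL_MISSING", or unset
    configure_compilers(compiler_action)  # any other value is a no-op
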
def _push_mirror_contents(input_spec, sign_binaries, mirror_url):
    """Unchecked version of the public API, for easier mocking"""
    unsigned = not sign_binaries
@@ -43,13 +43,6 @@ def setup_parser(subparser):
    subparsers = subparser.add_subparsers(help="buildcache sub-commands")

    push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
    # TODO: remove from Spack 0.21
    push.add_argument(
        "-r",
        "--rel",
        action="store_true",
        help="make all rpaths relative before creating tarballs. (deprecated)",
    )
    push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists.")
    push.add_argument(
        "-u", "--unsigned", action="store_true", help="push unsigned buildcache tarballs"
@@ -63,37 +56,7 @@ def setup_parser(subparser):
    push.add_argument(
        "-k", "--key", metavar="key", type=str, default=None, help="Key for signing."
    )
    output = push.add_mutually_exclusive_group(required=False)
    # TODO: remove from Spack 0.21
    output.add_argument(
        "-d",
        "--directory",
        metavar="directory",
        dest="mirror_flag",
        type=arguments.mirror_directory,
        help="local directory where buildcaches will be written. (deprecated)",
    )
    # TODO: remove from Spack 0.21
    output.add_argument(
        "-m",
        "--mirror-name",
        metavar="mirror-name",
        dest="mirror_flag",
        type=arguments.mirror_name,
        help="name of the mirror where buildcaches will be written. (deprecated)",
    )
    # TODO: remove from Spack 0.21
    output.add_argument(
        "--mirror-url",
        metavar="mirror-url",
        dest="mirror_flag",
        type=arguments.mirror_url,
        help="URL of the mirror where buildcaches will be written. (deprecated)",
    )
    # Unfortunately we cannot add this to the mutually exclusive group above,
    # because we have further positional arguments.
    # TODO: require from Spack 0.21
    push.add_argument("mirror", type=str, help="Mirror name, path, or URL.", nargs="?")
    push.add_argument("mirror", type=str, help="Mirror name, path, or URL.")
    push.add_argument(
        "--update-index",
        "--rebuild-index",
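The net effect of this hunk: the deprecated -d/-m/--mirror-url flags disappear, and the positional mirror argument loses nargs="?" and becomes required. For a hypothetical mirror named my-mirror, `spack buildcache push --mirror-name my-mirror hdf5` becomes simply `spack buildcache push my-mirror hdf5`.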
@@ -127,13 +90,6 @@ def setup_parser(subparser):
    install.add_argument(
        "-m", "--multiple", action="store_true", help="allow all matching packages "
    )
    # TODO: remove from Spack 0.21
    install.add_argument(
        "-a",
        "--allow-root",
        action="store_true",
        help="allow install root string in binary files after RPATH substitution. (deprecated)",
    )
    install.add_argument(
        "-u",
        "--unsigned",
@@ -268,75 +224,21 @@ def setup_parser(subparser):
    # Sync buildcache entries from one mirror to another
    sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
    sync.add_argument(
        "--manifest-glob",
        default=None,
        help="A quoted glob pattern identifying copy manifest files",
        "--manifest-glob", help="A quoted glob pattern identifying copy manifest files"
    )
    source = sync.add_mutually_exclusive_group(required=False)
    # TODO: remove in Spack 0.21
    source.add_argument(
        "--src-directory",
        metavar="DIRECTORY",
        dest="src_mirror_flag",
        type=arguments.mirror_directory,
        help="Source mirror as a local file path (deprecated)",
    )
    # TODO: remove in Spack 0.21
    source.add_argument(
        "--src-mirror-name",
        metavar="MIRROR_NAME",
        dest="src_mirror_flag",
        type=arguments.mirror_name,
        help="Name of the source mirror (deprecated)",
    )
    # TODO: remove in Spack 0.21
    source.add_argument(
        "--src-mirror-url",
        metavar="MIRROR_URL",
        dest="src_mirror_flag",
        type=arguments.mirror_url,
        help="URL of the source mirror (deprecated)",
    )
    # TODO: only support this in 0.21
    source.add_argument(
    sync.add_argument(
        "src_mirror",
        metavar="source mirror",
        type=arguments.mirror_name_or_url,
        help="Source mirror name, path, or URL",
        nargs="?",
        help="Source mirror name, path, or URL",
    )
    dest = sync.add_mutually_exclusive_group(required=False)
    # TODO: remove in Spack 0.21
    dest.add_argument(
        "--dest-directory",
        metavar="DIRECTORY",
        dest="dest_mirror_flag",
        type=arguments.mirror_directory,
        help="Destination mirror as a local file path (deprecated)",
    )
    # TODO: remove in Spack 0.21
    dest.add_argument(
        "--dest-mirror-name",
        metavar="MIRROR_NAME",
        type=arguments.mirror_name,
        dest="dest_mirror_flag",
        help="Name of the destination mirror (deprecated)",
    )
    # TODO: remove in Spack 0.21
    dest.add_argument(
        "--dest-mirror-url",
        metavar="MIRROR_URL",
        dest="dest_mirror_flag",
        type=arguments.mirror_url,
        help="URL of the destination mirror (deprecated)",
    )
    # TODO: only support this in 0.21
    dest.add_argument(
    sync.add_argument(
        "dest_mirror",
        metavar="destination mirror",
        type=arguments.mirror_name_or_url,
        help="Destination mirror name, path, or URL",
        nargs="?",
        help="Destination mirror name, path, or URL",
    )
    sync.set_defaults(func=sync_fn)

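Here too the flag spellings give way to positionals, e.g. `spack buildcache sync ./local-cache s3://example-bucket/cache` (hypothetical locations). The two mirrors appear to keep nargs="?" only because --manifest-glob remains an alternative way to invoke sync; the missing-mirror check moves into sync_fn itself, as the later hunk shows.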
@@ -344,39 +246,8 @@ def setup_parser(subparser):
    update_index = subparsers.add_parser(
        "update-index", aliases=["rebuild-index"], help=update_index_fn.__doc__
    )
    update_index_out = update_index.add_mutually_exclusive_group(required=True)
    # TODO: remove in Spack 0.21
    update_index_out.add_argument(
        "-d",
        "--directory",
        metavar="directory",
        dest="mirror_flag",
        type=arguments.mirror_directory,
        help="local directory where buildcaches will be written (deprecated)",
    )
    # TODO: remove in Spack 0.21
    update_index_out.add_argument(
        "-m",
        "--mirror-name",
        metavar="mirror-name",
        dest="mirror_flag",
        type=arguments.mirror_name,
        help="name of the mirror where buildcaches will be written (deprecated)",
    )
    # TODO: remove in Spack 0.21
    update_index_out.add_argument(
        "--mirror-url",
        metavar="mirror-url",
        dest="mirror_flag",
        type=arguments.mirror_url,
        help="URL of the mirror where buildcaches will be written (deprecated)",
    )
    # TODO: require from Spack 0.21
    update_index_out.add_argument(
        "mirror",
        type=arguments.mirror_name_or_url,
        help="Destination mirror name, path, or URL",
        nargs="?",
    update_index.add_argument(
        "mirror", type=arguments.mirror_name_or_url, help="Destination mirror name, path, or URL"
    )
    update_index.add_argument(
        "-k",
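After this change the command reads `spack buildcache update-index my-mirror` (hypothetical mirror name), with the destination a required positional rather than one of several mutually exclusive flags.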
@@ -436,32 +307,12 @@ def _concrete_spec_from_args(args):

def push_fn(args):
    """create a binary package and push it to a mirror"""
    if args.mirror_flag:
        mirror = args.mirror_flag
    elif not args.mirror:
        raise ValueError("No mirror provided")
    else:
        mirror = arguments.mirror_name_or_url(args.mirror)

    if args.mirror_flag:
        tty.warn(
            "Using flags to specify mirrors is deprecated and will be removed in "
            "Spack 0.21, use positional arguments instead."
        )

    if args.rel:
        tty.warn("The --rel flag is deprecated and will be removed in Spack 0.21")

    # TODO: remove this in 0.21. If we have mirror_flag, the first
    # spec is in the positional mirror arg due to argparse limitations.
    input_specs = args.specs
    if args.mirror_flag and args.mirror:
        input_specs.insert(0, args.mirror)
    mirror = arguments.mirror_name_or_url(args.mirror)

    url = mirror.push_url

    specs = bindist.specs_to_be_packaged(
        _matching_specs(input_specs, args.spec_file),
        _matching_specs(args.specs, args.spec_file),
        root="package" in args.things_to_install,
        dependencies="dependencies" in args.things_to_install,
    )
@@ -486,7 +337,6 @@ def push_fn(args):
        url,
        bindist.PushOptions(
            force=args.force,
            relative=args.rel,
            unsigned=args.unsigned,
            allow_root=args.allow_root,
            key=args.key,
@@ -524,9 +374,6 @@ def install_fn(args):
    if not args.specs:
        tty.die("a spec argument is required to install from a buildcache")

    if args.allow_root:
        tty.warn("The --allow-root flag is deprecated and will be removed in Spack 0.21")

    query = bindist.BinaryCacheQuery(all_architectures=args.otherarch)
    matches = spack.store.find(args.specs, multiple=args.multiple, query_fn=query)
    for match in matches:
@@ -710,21 +557,11 @@ def sync_fn(args):
        manifest_copy(glob.glob(args.manifest_glob))
        return 0

    # If no manifest_glob, require a source and dest mirror.
    # TODO: Simplify in Spack 0.21
    if not (args.src_mirror_flag or args.src_mirror) or not (
        args.dest_mirror_flag or args.dest_mirror
    ):
        raise ValueError("Source and destination mirror are required.")
    if args.src_mirror is None or args.dest_mirror is None:
        tty.die("Provide mirrors to sync from and to.")

    if args.src_mirror_flag or args.dest_mirror_flag:
        tty.warn(
            "Using flags to specify mirrors is deprecated and will be removed in "
            "Spack 0.21, use positional arguments instead."
        )

    src_mirror = args.src_mirror_flag if args.src_mirror_flag else args.src_mirror
    dest_mirror = args.dest_mirror_flag if args.dest_mirror_flag else args.dest_mirror
    src_mirror = args.src_mirror
    dest_mirror = args.dest_mirror

    src_mirror_url = src_mirror.fetch_url
    dest_mirror_url = dest_mirror.push_url
@@ -803,13 +640,7 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):

def update_index_fn(args):
    """Update a buildcache index."""
    if args.mirror_flag:
        tty.warn(
            "Using flags to specify mirrors is deprecated and will be removed in "
            "Spack 0.21, use positional arguments instead."
        )
    mirror = args.mirror_flag if args.mirror_flag else args.mirror
    update_index(mirror, update_keys=args.keys)
    update_index(args.mirror, update_keys=args.keys)


def buildcache(parser, args):
@@ -228,7 +228,7 @@ def ci_reindex(args):
    Use the active, gitlab-enabled environment to rebuild the buildcache
    index for the associated mirror."""
    env = spack.cmd.require_active_env(cmd_name="ci rebuild-index")
    yaml_root = ev.config_dict(env.manifest)
    yaml_root = env.manifest[ev.TOP_LEVEL_KEY]

    if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
        tty.die("spack ci rebuild-index requires an env containing a mirror")
@@ -274,7 +274,6 @@ def ci_rebuild(args):
    signing_key = os.environ.get("SPACK_SIGNING_KEY")
    job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME")
    job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH")
    compiler_action = os.environ.get("SPACK_COMPILER_ACTION")
    spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE")
    remote_mirror_override = os.environ.get("SPACK_REMOTE_MIRROR_OVERRIDE")
    remote_mirror_url = os.environ.get("SPACK_REMOTE_MIRROR_URL")
@@ -295,7 +294,6 @@ def ci_rebuild(args):
    tty.debug("pipeline_artifacts_dir = {0}".format(pipeline_artifacts_dir))
    tty.debug("remote_mirror_url = {0}".format(remote_mirror_url))
    tty.debug("job_spec_pkg_name = {0}".format(job_spec_pkg_name))
    tty.debug("compiler_action = {0}".format(compiler_action))

    # Query the environment manifest to find out whether we're reporting to a
    # CDash instance, and if so, gather some information from the manifest to
@@ -411,14 +409,6 @@ def ci_rebuild(args):
    if signing_key:
        spack_ci.import_signing_key(signing_key)

    # Depending on the specifics of this job, we might need to turn on the
    # "config:install_missing_compilers" option (to build this job spec
    # with a bootstrapped compiler), or possibly run "spack compiler find"
    # (to build a bootstrap compiler or one of its deps in a
    # compiler-agnostic way), or maybe do nothing at all (to build a spec
    # using a compiler already installed on the target system).
    spack_ci.configure_compilers(compiler_action)

    # Write this job's spec json into the reproduction directory, and it will
    # also be used in the generated "spack install" command to install the spec
    tty.debug("job concrete spec path: {0}".format(job_spec_json_path))
@@ -36,7 +36,10 @@ def shell_init_instructions(cmd, equivalent):
        " source %s/setup-env.fish" % spack.paths.share_path,
        "",
        color.colorize("@*c{For Windows batch:}"),
        " source %s/spack_cmd.bat" % spack.paths.share_path,
        " %s\\spack_cmd.bat" % spack.paths.bin_path,
        "",
        color.colorize("@*c{For PowerShell:}"),
        " %s\\setup-env.ps1" % spack.paths.share_path,
        "",
        "Or, if you do not want to use shell support, run "
        + ("one of these" if shell_specific else "this")
@@ -50,6 +53,7 @@ def shell_init_instructions(cmd, equivalent):
        equivalent.format(sh_arg="--csh ") + " # csh/tcsh",
        equivalent.format(sh_arg="--fish") + " # fish",
        equivalent.format(sh_arg="--bat ") + " # batch",
        equivalent.format(sh_arg="--pwsh") + " # powershell",
    ]
    else:
        msg += [" " + equivalent]
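With a hypothetical Spack checkout under C:\spack, the new PowerShell branch prints an instruction resolving to C:\spack\share\spack\setup-env.ps1, alongside the corrected batch path that now points at bin_path rather than share_path.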
@@ -349,7 +349,7 @@ def install_status():
        "-I",
        "--install-status",
        action="store_true",
        default=False,
        default=True,
        help="show install status of packages. packages can be: "
        "installed [+], missing and needed by an installed package [-], "
        "installed in an upstream instance [^], "
@@ -357,6 +357,17 @@ def install_status():
    )


@arg
def no_install_status():
    return Args(
        "--no-install-status",
        dest="install_status",
        action="store_false",
        default=True,
        help="do not show install status annotations",
    )

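The two factories cooperate through a shared dest and a store_true/store_false pair, with the default flipped to True. A self-contained argparse equivalent of the pattern:

    import argparse

    parser = argparse.ArgumentParser()
    group = parser.add_mutually_exclusive_group()
    group.add_argument(
        "-I", "--install-status", dest="install_status", action="store_true", default=True
    )
    group.add_argument(
        "--no-install-status", dest="install_status", action="store_false", default=True
    )

    assert parser.parse_args([]).install_status is True
    assert parser.parse_args(["--no-install-status"]).install_status is False
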
@arg
def no_checksum():
    return Args(
@@ -53,7 +53,7 @@ def setup_parser(subparser):
        "--scope",
        choices=scopes,
        metavar=scopes_metavar,
        default=spack.config.default_modify_scope("compilers"),
        default=None,
        help="configuration scope to modify",
    )

@@ -106,19 +106,21 @@ def compiler_find(args):


def compiler_remove(args):
    cspec = spack.spec.CompilerSpec(args.compiler_spec)
    compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
    if not compilers:
        tty.die("No compilers match spec %s" % cspec)
    elif not args.all and len(compilers) > 1:
        tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
        colify(reversed(sorted([c.spec.display_str for c in compilers])), indent=4)
    compiler_spec = spack.spec.CompilerSpec(args.compiler_spec)
    candidate_compilers = spack.compilers.compilers_for_spec(compiler_spec, scope=args.scope)

    if not candidate_compilers:
        tty.die("No compilers match spec %s" % compiler_spec)

    if not args.all and len(candidate_compilers) > 1:
        tty.error(f"Multiple compilers match spec {compiler_spec}. Choose one:")
        colify(reversed(sorted([c.spec.display_str for c in candidate_compilers])), indent=4)
        tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
        sys.exit(1)

    for compiler in compilers:
        spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope)
        tty.msg("Removed compiler %s" % compiler.spec.display_str)
    for current_compiler in candidate_compilers:
        spack.compilers.remove_compiler_from_config(current_compiler.spec, scope=args.scope)
        tty.msg(f"{current_compiler.spec.display_str} has been removed")


def compiler_info(args):
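In practice, an ambiguous `spack compiler remove gcc` (hypothetical spec matching several entries) now exits with the candidate list and a `spack compiler remove -a` hint, and each successful removal is reported with a per-compiler message.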
@@ -715,7 +715,7 @@ def __call__(self, stage, url):
            output = tar("--exclude=*/*/*", "-tf", stage.archive_file, output=str)
        except ProcessError:
            output = ""
        lines = output.split("\n")
        lines = output.splitlines()

        # Determine the build system based on the files contained
        # in the archive.
@@ -86,6 +86,13 @@ def env_activate_setup_parser(subparser):
        const="bat",
        help="print bat commands to activate the environment",
    )
    shells.add_argument(
        "--pwsh",
        action="store_const",
        dest="shell",
        const="pwsh",
        help="print powershell commands to activate environment",
    )

    view_options = subparser.add_mutually_exclusive_group()
    view_options.add_argument(
@@ -44,7 +44,11 @@ def setup_parser(subparser):
    )

    # Below are arguments w.r.t. spec display (like spack spec)
    arguments.add_common_arguments(subparser, ["long", "very_long", "install_status"])
    arguments.add_common_arguments(subparser, ["long", "very_long"])

    install_status_group = subparser.add_mutually_exclusive_group()
    arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])

    subparser.add_argument(
        "-y",
        "--yaml",
@@ -31,7 +31,11 @@ def setup_parser(subparser):
    for further documentation regarding the spec syntax, see:
        spack help --spec
    """
    arguments.add_common_arguments(subparser, ["long", "very_long", "install_status"])
    arguments.add_common_arguments(subparser, ["long", "very_long"])

    install_status_group = subparser.add_mutually_exclusive_group()
    arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])

    format_group = subparser.add_mutually_exclusive_group()
    format_group.add_argument(
        "-y",
@@ -37,7 +37,6 @@
    "implicit_rpaths",
    "extra_rpaths",
]
_cache_config_file = []

# TODO: Caches at module level make it difficult to mock configurations in
# TODO: unit tests. It might be worth reworking their implementation.
@@ -155,52 +154,65 @@ def add_compilers_to_config(compilers, scope=None, init_config=True):
    compiler_config = get_compiler_config(scope, init_config)
    for compiler in compilers:
        compiler_config.append(_to_dict(compiler))
    global _cache_config_file
    _cache_config_file = compiler_config
    spack.config.set("compilers", compiler_config, scope=scope)


@_auto_compiler_spec
def remove_compiler_from_config(compiler_spec, scope=None):
    """Remove compilers from the config, by spec.
    """Remove compilers from configuration by spec.

    If scope is None, all the scopes are searched for removal.

    Arguments:
        compiler_specs: a list of CompilerSpec objects.
        scope: configuration scope to modify.
        compiler_spec: compiler to be removed
        scope: configuration scope to modify
    """
    # Need a better way for this
    global _cache_config_file
    candidate_scopes = [scope]
    if scope is None:
        candidate_scopes = spack.config.config.scopes.keys()

    removal_happened = False
    for current_scope in candidate_scopes:
        removal_happened |= _remove_compiler_from_scope(compiler_spec, scope=current_scope)

    return removal_happened


def _remove_compiler_from_scope(compiler_spec, scope):
    """Removes a compiler from a specific configuration scope.

    Args:
        compiler_spec: compiler to be removed
        scope: configuration scope under consideration

    Returns:
        True if one or more compiler entries were actually removed, False otherwise
    """
    assert scope is not None, "a specific scope is needed when calling this function"
    compiler_config = get_compiler_config(scope)
    config_length = len(compiler_config)

    filtered_compiler_config = [
        comp
        for comp in compiler_config
        compiler_entry
        for compiler_entry in compiler_config
        if not spack.spec.parse_with_version_concrete(
            comp["compiler"]["spec"], compiler=True
            compiler_entry["compiler"]["spec"], compiler=True
        ).satisfies(compiler_spec)
    ]

    # Update the cache for changes
    _cache_config_file = filtered_compiler_config
    if len(filtered_compiler_config) == config_length:  # No items removed
        CompilerSpecInsufficientlySpecificError(compiler_spec)
    spack.config.set("compilers", filtered_compiler_config, scope=scope)
    if len(filtered_compiler_config) == len(compiler_config):
        return False

    # We need to preserve the YAML type for comments, hence we copy the
    # items of the list that was just retrieved
    compiler_config[:] = filtered_compiler_config
    spack.config.set("compilers", compiler_config, scope=scope)
    return True

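The slice assignment is the load-bearing detail here: Spack reads YAML through ruamel's round-trip types, and rebuilding the list object from scratch would discard the comment metadata hanging off the original container. A minimal illustration of the idiom, assuming ruamel.yaml is available:

    import ruamel.yaml

    yaml = ruamel.yaml.YAML()  # round-trip mode preserves comments
    data = yaml.load("compilers:\n- gcc    # system\n- clang  # vendor\n")

    kept = [c for c in data["compilers"] if c != "gcc"]
    data["compilers"][:] = kept  # in-place: the commented container object survives
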
def all_compilers_config(scope=None, init_config=True):
    """Return a set of specs for all the compiler versions currently
    available to build with. These are instances of CompilerSpec.
    """
    # Get compilers for this architecture.
    # Create a cache of the config file so we don't load all the time.
    global _cache_config_file
    if not _cache_config_file:
        _cache_config_file = get_compiler_config(scope, init_config)
        return _cache_config_file
    else:
        return _cache_config_file
    return get_compiler_config(scope, init_config)


def all_compiler_specs(scope=None, init_config=True):
@@ -81,7 +81,7 @@
# Same as above, but including keys for environments
# this allows us to unify config reading between configs and environments
all_schemas = copy.deepcopy(section_schemas)
all_schemas.update(dict((key, spack.schema.env.schema) for key in spack.schema.env.keys))
all_schemas.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})

#: Path to the default configuration
configuration_defaults_path = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))
@@ -111,14 +111,6 @@
overrides_base_name = "overrides-"


def first_existing(dictionary, keys):
    """Get the value of the first key in keys that is in the dictionary."""
    try:
        return next(k for k in keys if k in dictionary)
    except StopIteration:
        raise KeyError("None of %s is in dict!" % str(keys))


class ConfigScope(object):
    """This class represents a configuration scope.

@@ -838,12 +830,10 @@ def _config():

def add_from_file(filename, scope=None):
    """Add updates to a config from a filename"""
    import spack.environment as ev

    # Get file as config dict
    # Extract internal attributes, if we are dealing with an environment
    data = read_config_file(filename)
    if any(k in data for k in spack.schema.env.keys):
        data = ev.config_dict(data)
    if spack.schema.env.TOP_LEVEL_KEY in data:
        data = data[spack.schema.env.TOP_LEVEL_KEY]

    # update all sections from config dict
    # We have to iterate on keys to keep overrides from the file
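With spack.schema.env.keys gone, only the spack: top-level key is recognized on these paths, so reading a manifest reduces to a plain lookup. A minimal sketch (file name hypothetical):

    import spack.schema.env
    import spack.util.spack_yaml as syaml

    with open("spack.yaml") as f:
        data = syaml.load(f)

    env_section = data[spack.schema.env.TOP_LEVEL_KEY]  # i.e. data["spack"]; the deprecated "env:" key is not consulted here
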
@@ -1353,17 +1343,11 @@ def use_configuration(*scopes_or_paths):
    configuration = _config_from(scopes_or_paths)
    config.clear_caches(), configuration.clear_caches()

    # Save and clear the current compiler cache
    saved_compiler_cache = spack.compilers._cache_config_file
    spack.compilers._cache_config_file = []

    saved_config, config = config, configuration

    try:
        yield configuration
    finally:
        # Restore previous config files
        spack.compilers._cache_config_file = saved_compiler_cache
        config = saved_config
@@ -37,7 +37,7 @@ def validate(configuration_file):
        config = syaml.load(f)

    # Ensure we have a "container" attribute with sensible defaults set
    env_dict = ev.config_dict(config)
    env_dict = config[ev.TOP_LEVEL_KEY]
    env_dict.setdefault(
        "container", {"format": "docker", "images": {"os": "ubuntu:22.04", "spack": "develop"}}
    )
@@ -17,7 +17,7 @@
        "template": "container/fedora_38.dockerfile",
        "image": "docker.io/fedora:38"
    },
    "os_package_manager": "yum",
    "os_package_manager": "dnf",
    "build": "spack/fedora38",
    "build_tags": {
        "develop": "latest"
@@ -31,7 +31,7 @@
        "template": "container/fedora_37.dockerfile",
        "image": "docker.io/fedora:37"
    },
    "os_package_manager": "yum",
    "os_package_manager": "dnf",
    "build": "spack/fedora37",
    "build_tags": {
        "develop": "latest"
@@ -45,7 +45,7 @@
        "template": "container/rockylinux_9.dockerfile",
        "image": "docker.io/rockylinux:9"
    },
    "os_package_manager": "yum",
    "os_package_manager": "dnf_epel",
    "build": "spack/rockylinux9",
    "build_tags": {
        "develop": "latest"
@@ -59,7 +59,7 @@
        "template": "container/rockylinux_8.dockerfile",
        "image": "docker.io/rockylinux:8"
    },
    "os_package_manager": "yum",
    "os_package_manager": "dnf_epel",
    "build": "spack/rockylinux8",
    "build_tags": {
        "develop": "latest"
@@ -73,7 +73,7 @@
        "template": "container/almalinux_9.dockerfile",
        "image": "quay.io/almalinux/almalinux:9"
    },
    "os_package_manager": "yum",
    "os_package_manager": "dnf_epel",
    "build": "spack/almalinux9",
    "build_tags": {
        "develop": "latest"
@@ -87,7 +87,7 @@
        "template": "container/almalinux_8.dockerfile",
        "image": "quay.io/almalinux/almalinux:8"
    },
    "os_package_manager": "yum",
    "os_package_manager": "dnf_epel",
    "build": "spack/almalinux8",
    "build_tags": {
        "develop": "latest"
@@ -101,7 +101,7 @@
        "template": "container/centos_stream.dockerfile",
        "image": "quay.io/centos/centos:stream"
    },
    "os_package_manager": "yum",
    "os_package_manager": "dnf_epel",
    "build": "spack/centos-stream",
    "final": {
        "image": "quay.io/centos/centos:stream"
@@ -185,6 +185,16 @@
        "install": "apt-get -yqq install",
        "clean": "rm -rf /var/lib/apt/lists/*"
    },
    "dnf": {
        "update": "dnf update -y",
        "install": "dnf install -y",
        "clean": "rm -rf /var/cache/dnf && dnf clean all"
    },
    "dnf_epel": {
        "update": "dnf update -y && dnf install -y epel-release && dnf update -y",
        "install": "dnf install -y",
        "clean": "rm -rf /var/cache/dnf && dnf clean all"
    },
    "yum": {
        "update": "yum update -y && yum install -y epel-release && yum update -y",
        "install": "yum install -y",
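When a recipe is rendered, these strings become the package-manager bootstrap step of the generated Dockerfile; for one of the dnf_epel images above that would surface roughly as `RUN dnf update -y && dnf install -y epel-release && dnf update -y` (a sketch; the exact template output may differ).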
@@ -50,7 +50,7 @@ def create(configuration, last_phase=None):
        configuration (dict): how to generate the current recipe
        last_phase (str): last phase to be printed or None to print them all
    """
    name = ev.config_dict(configuration)["container"]["format"]
    name = configuration[ev.TOP_LEVEL_KEY]["container"]["format"]
    return _writer_factory[name](configuration, last_phase)


@@ -138,7 +138,7 @@ class PathContext(tengine.Context):
    template_name: Optional[str] = None

    def __init__(self, config, last_phase):
        self.config = ev.config_dict(config)
        self.config = config[ev.TOP_LEVEL_KEY]
        self.container_config = self.config["container"]

        # Operating system tag as written in the configuration file
@@ -164,7 +164,10 @@ def entries_to_specs(entries):
            continue
        parent_spec = spec_dict[entry["hash"]]
        dep_spec = spec_dict[dep_hash]
        parent_spec._add_dependency(dep_spec, deptypes=deptypes)
        parent_spec._add_dependency(dep_spec, deptypes=deptypes, virtuals=())

    for spec in spec_dict.values():
        spack.spec.reconstruct_virtuals_on_edges(spec)

    return spec_dict

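The new virtuals keyword records which virtual packages an edge provides. Manifest entries carry no such information, hence the empty tuple here, with reconstruct_virtuals_on_edges recovering the data afterwards; an edge known to provide a virtual would instead be added along the lines of (hypothetical deptypes/virtuals values):

    parent_spec._add_dependency(dep_spec, deptypes=("build", "link"), virtuals=("mpi",))
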
@@ -60,7 +60,7 @@
# DB version. This is stuck in the DB file to track changes in format.
# Increment by one when the database format changes.
# Versions before 5 were not integers.
_db_version = vn.Version("6")
_db_version = vn.Version("7")

# For any version combinations here, skip reindex when upgrading.
# Reindexing can take considerable time and is not always necessary.
@@ -72,6 +72,7 @@
    # version is saved to disk the first time the DB is written.
    (vn.Version("0.9.3"), vn.Version("5")),
    (vn.Version("5"), vn.Version("6")),
    (vn.Version("6"), vn.Version("7")),
]

# Default timeout for spack database locks in seconds or None (no timeout).
@@ -105,7 +106,11 @@


def reader(version):
    reader_cls = {vn.Version("5"): spack.spec.SpecfileV1, vn.Version("6"): spack.spec.SpecfileV3}
    reader_cls = {
        vn.Version("5"): spack.spec.SpecfileV1,
        vn.Version("6"): spack.spec.SpecfileV3,
        vn.Version("7"): spack.spec.SpecfileV4,
    }
    return reader_cls[version]

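Given the mapping above, callers select a specfile reader from the version stamped in the database file, e.g.:

    spec_reader = reader(vn.Version("6"))        # -> spack.spec.SpecfileV3
    assert reader(vn.Version("7")) is spack.spec.SpecfileV4
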
@@ -743,7 +748,9 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
        spec_node_dict = spec_node_dict[spec.name]
        if "dependencies" in spec_node_dict:
            yaml_deps = spec_node_dict["dependencies"]
            for dname, dhash, dtypes, _ in spec_reader.read_specfile_dep_specs(yaml_deps):
            for dname, dhash, dtypes, _, virtuals in spec_reader.read_specfile_dep_specs(
                yaml_deps
            ):
                # It is important that we always check upstream installations
                # in the same order, and that we always check the local
                # installation first: if a downstream Spack installs a package
@@ -766,7 +773,7 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
                    tty.warn(msg)
                    continue

                spec._add_dependency(child, deptypes=dtypes)
                spec._add_dependency(child, deptypes=dtypes, virtuals=virtuals)

    def _read_from_file(self, filename):
        """Fill database from file, do not maintain old data.
@@ -1172,7 +1179,7 @@ def _add(
        for dep in spec.edges_to_dependencies(deptype=_tracked_deps):
            dkey = dep.spec.dag_hash()
            upstream, record = self.query_by_spec_hash(dkey)
            new_spec._add_dependency(record.spec, deptypes=dep.deptypes)
            new_spec._add_dependency(record.spec, deptypes=dep.deptypes, virtuals=dep.virtuals)
            if not upstream:
                record.ref_count += 1

@@ -337,6 +337,7 @@
"""

from .environment import (
    TOP_LEVEL_KEY,
    Environment,
    SpackEnvironmentError,
    SpackEnvironmentViewError,
@@ -345,7 +346,6 @@
    active_environment,
    all_environment_names,
    all_environments,
    config_dict,
    create,
    create_in_dir,
    deactivate,
@@ -369,6 +369,7 @@
)

__all__ = [
    "TOP_LEVEL_KEY",
    "Environment",
    "SpackEnvironmentError",
    "SpackEnvironmentViewError",
@@ -377,7 +378,6 @@
    "active_environment",
    "all_environment_names",
    "all_environments",
    "config_dict",
    "create",
    "create_in_dir",
    "deactivate",
@@ -53,6 +53,7 @@
import spack.version
from spack.filesystem_view import SimpleFilesystemView, inverse_view_func_parser, view_func_parser
from spack.installer import PackageInstaller
from spack.schema.env import TOP_LEVEL_KEY
from spack.spec import Spec
from spack.spec_list import InvalidSpecConstraintError, SpecList
from spack.util.path import substitute_path_variables
@@ -124,7 +125,7 @@ def default_manifest_yaml():
valid_environment_name_re = r"^\w[\w-]*$"

#: version of the lockfile format. Must increase monotonically.
lockfile_format_version = 4
lockfile_format_version = 5


READER_CLS = {
@@ -132,6 +133,7 @@ def default_manifest_yaml():
    2: spack.spec.SpecfileV1,
    3: spack.spec.SpecfileV2,
    4: spack.spec.SpecfileV3,
    5: spack.spec.SpecfileV4,
}


@@ -361,19 +363,6 @@ def ensure_env_root_path_exists():
    fs.mkdirp(env_root_path())


def config_dict(yaml_data):
    """Get the configuration scope section out of a spack.yaml"""
    # TODO (env:): Remove env: as a possible top level keyword in v0.21
    key = spack.config.first_existing(yaml_data, spack.schema.env.keys)
    if key == "env":
        msg = (
            "using 'env:' as a top-level attribute of a Spack environment is deprecated and "
            "will be removed in Spack v0.21. Please use 'spack:' instead."
        )
        warnings.warn(msg)
    return yaml_data[key]


def all_environment_names():
    """List the names of environments that currently exist."""
    # just return empty if the env path does not exist. A read-only
@@ -821,8 +810,8 @@ def write_transaction(self):
    def _construct_state_from_manifest(self):
        """Read manifest file and set up user specs."""
        self.spec_lists = collections.OrderedDict()

        for item in config_dict(self.manifest).get("definitions", []):
        env_configuration = self.manifest[TOP_LEVEL_KEY]
        for item in env_configuration.get("definitions", []):
            entry = copy.deepcopy(item)
            when = _eval_conditional(entry.pop("when", "True"))
            assert len(entry) == 1
@@ -834,13 +823,13 @@ def _construct_state_from_manifest(self):
            else:
                self.spec_lists[name] = user_specs

        spec_list = config_dict(self.manifest).get(user_speclist_name, [])
        spec_list = env_configuration.get(user_speclist_name, [])
        user_specs = SpecList(
            user_speclist_name, [s for s in spec_list if s], self.spec_lists.copy()
        )
        self.spec_lists[user_speclist_name] = user_specs

        enable_view = config_dict(self.manifest).get("view")
        enable_view = env_configuration.get("view")
        # enable_view can be boolean, string, or None
        if enable_view is True or enable_view is None:
            self.views = {default_view_name: ViewDescriptor(self.path, self.view_path_default)}
@@ -855,14 +844,11 @@ def _construct_state_from_manifest(self):
        else:
            self.views = {}

        # Retrieve the current concretization strategy
        configuration = config_dict(self.manifest)

        # Retrieve unification scheme for the concretizer
        self.unify = spack.config.get("concretizer:unify", False)

        # Retrieve dev-build packages:
        self.dev_specs = copy.deepcopy(configuration.get("develop", {}))
        self.dev_specs = copy.deepcopy(env_configuration.get("develop", {}))
        for name, entry in self.dev_specs.items():
            # spec must include a concrete version
            assert Spec(entry["spec"]).versions.concrete_range_as_version
@@ -982,7 +968,7 @@ def included_config_scopes(self):

        # load config scopes added via 'include:', in reverse so that
        # highest-precedence scopes are last.
        includes = config_dict(self.manifest).get("include", [])
        includes = self.manifest[TOP_LEVEL_KEY].get("include", [])
        missing = []
        for i, config_path in enumerate(reversed(includes)):
            # allow paths to contain spack config/environment variables, etc.
@@ -1075,10 +1061,7 @@ def env_file_config_scope(self):
        """Get the configuration scope for the environment's manifest file."""
        config_name = self.env_file_config_scope_name()
        return spack.config.SingleFileScope(
            config_name,
            self.manifest_path,
            spack.schema.env.schema,
            [spack.config.first_existing(self.manifest, spack.schema.env.keys)],
            config_name, self.manifest_path, spack.schema.env.schema, [TOP_LEVEL_KEY]
        )

    def config_scopes(self):
@@ -1566,12 +1549,13 @@ def _concretize_separately(self, tests=False):
        for h in self.specs_by_hash:
            current_spec, computed_spec = self.specs_by_hash[h], by_hash[h]
            for node in computed_spec.traverse():
                test_deps = node.dependencies(deptype="test")
                for test_dependency in test_deps:
                test_edges = node.edges_to_dependencies(deptype="test")
                for current_edge in test_edges:
                    test_dependency = current_edge.spec
                    if test_dependency in current_spec[node.name]:
                        continue
                    current_spec[node.name].add_dependency_edge(
                        test_dependency.copy(), deptypes="test"
                        test_dependency.copy(), deptypes="test", virtuals=current_edge.virtuals
                    )

        results = [
@@ -2202,9 +2186,9 @@ def _read_lockfile_dict(self, d):
        # and add them to the spec
        for lockfile_key, node_dict in json_specs_by_hash.items():
            name, data = reader.name_and_data(node_dict)
            for _, dep_hash, deptypes, _ in reader.dependencies_from_node_dict(data):
            for _, dep_hash, deptypes, _, virtuals in reader.dependencies_from_node_dict(data):
                specs_by_hash[lockfile_key]._add_dependency(
                    specs_by_hash[dep_hash], deptypes=deptypes
                    specs_by_hash[dep_hash], deptypes=deptypes, virtuals=virtuals
                )

        # Traverse the root specs one at a time in the order they appear.
@@ -2684,8 +2668,8 @@ def add_user_spec(self, user_spec: str) -> None:
Args:
user_spec: user spec to be appended
"""
config_dict(self.pristine_yaml_content).setdefault("specs", []).append(user_spec)
config_dict(self.yaml_content).setdefault("specs", []).append(user_spec)
self.pristine_configuration.setdefault("specs", []).append(user_spec)
self.configuration.setdefault("specs", []).append(user_spec)
self.changed = True

def remove_user_spec(self, user_spec: str) -> None:
@@ -2698,8 +2682,8 @@ def remove_user_spec(self, user_spec: str) -> None:
SpackEnvironmentError: when the user spec is not in the list
"""
try:
config_dict(self.pristine_yaml_content)["specs"].remove(user_spec)
config_dict(self.yaml_content)["specs"].remove(user_spec)
self.pristine_configuration["specs"].remove(user_spec)
self.configuration["specs"].remove(user_spec)
except ValueError as e:
msg = f"cannot remove {user_spec} from {self}, no such spec exists"
raise SpackEnvironmentError(msg) from e
@@ -2716,8 +2700,8 @@ def override_user_spec(self, user_spec: str, idx: int) -> None:
SpackEnvironmentError: when the user spec cannot be overridden
"""
try:
config_dict(self.pristine_yaml_content)["specs"][idx] = user_spec
config_dict(self.yaml_content)["specs"][idx] = user_spec
self.pristine_configuration["specs"][idx] = user_spec
self.configuration["specs"][idx] = user_spec
except ValueError as e:
msg = f"cannot override {user_spec} from {self}"
raise SpackEnvironmentError(msg) from e
@@ -2733,14 +2717,14 @@ def add_definition(self, user_spec: str, list_name: str) -> None:
Raises:
SpackEnvironmentError: if no valid definition exists already
"""
defs = config_dict(self.pristine_yaml_content).get("definitions", [])
defs = self.pristine_configuration.get("definitions", [])
msg = f"cannot add {user_spec} to the '{list_name}' definition, no valid list exists"

for idx, item in self._iterate_on_definitions(defs, list_name=list_name, err_msg=msg):
item[list_name].append(user_spec)
break

config_dict(self.yaml_content)["definitions"][idx][list_name].append(user_spec)
self.configuration["definitions"][idx][list_name].append(user_spec)
self.changed = True

def remove_definition(self, user_spec: str, list_name: str) -> None:
@@ -2754,7 +2738,7 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
SpackEnvironmentError: if the user spec cannot be removed from the list,
or the list does not exist
"""
defs = config_dict(self.pristine_yaml_content).get("definitions", [])
defs = self.pristine_configuration.get("definitions", [])
msg = (
f"cannot remove {user_spec} from the '{list_name}' definition, "
f"no valid list exists"
@@ -2767,7 +2751,7 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
except ValueError:
pass

config_dict(self.yaml_content)["definitions"][idx][list_name].remove(user_spec)
self.configuration["definitions"][idx][list_name].remove(user_spec)
self.changed = True

def override_definition(self, user_spec: str, *, override: str, list_name: str) -> None:
@@ -2782,7 +2766,7 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
Raises:
SpackEnvironmentError: if the user spec cannot be overridden
"""
defs = config_dict(self.pristine_yaml_content).get("definitions", [])
defs = self.pristine_configuration.get("definitions", [])
msg = f"cannot override {user_spec} with {override} in the '{list_name}' definition"

for idx, item in self._iterate_on_definitions(defs, list_name=list_name, err_msg=msg):
@@ -2793,7 +2777,7 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
except ValueError:
pass

config_dict(self.yaml_content)["definitions"][idx][list_name][sub_index] = override
self.configuration["definitions"][idx][list_name][sub_index] = override
self.changed = True

def _iterate_on_definitions(self, definitions, *, list_name, err_msg):
@@ -2825,24 +2809,24 @@ def set_default_view(self, view: Union[bool, str, pathlib.Path, Dict[str, str]])
True the default view is used for the environment, if False there's no view.
"""
if isinstance(view, dict):
config_dict(self.pristine_yaml_content)["view"][default_view_name].update(view)
config_dict(self.yaml_content)["view"][default_view_name].update(view)
self.pristine_configuration["view"][default_view_name].update(view)
self.configuration["view"][default_view_name].update(view)
self.changed = True
return

if not isinstance(view, bool):
view = str(view)

config_dict(self.pristine_yaml_content)["view"] = view
config_dict(self.yaml_content)["view"] = view
self.pristine_configuration["view"] = view
self.configuration["view"] = view
self.changed = True

def remove_default_view(self) -> None:
"""Removes the default view from the manifest file"""
view_data = config_dict(self.pristine_yaml_content).get("view")
view_data = self.pristine_configuration.get("view")
if isinstance(view_data, collections.abc.Mapping):
config_dict(self.pristine_yaml_content)["view"].pop(default_view_name)
config_dict(self.yaml_content)["view"].pop(default_view_name)
self.pristine_configuration["view"].pop(default_view_name)
self.configuration["view"].pop(default_view_name)
self.changed = True
return

@@ -2859,12 +2843,10 @@ def add_develop_spec(self, pkg_name: str, entry: Dict[str, str]) -> None:
if entry["path"] == pkg_name:
entry.pop("path")

config_dict(self.pristine_yaml_content).setdefault("develop", {}).setdefault(
pkg_name, {}
).update(entry)
config_dict(self.yaml_content).setdefault("develop", {}).setdefault(pkg_name, {}).update(
self.pristine_configuration.setdefault("develop", {}).setdefault(pkg_name, {}).update(
entry
)
self.configuration.setdefault("develop", {}).setdefault(pkg_name, {}).update(entry)
self.changed = True

def remove_develop_spec(self, pkg_name: str) -> None:
@@ -2877,11 +2859,11 @@ def remove_develop_spec(self, pkg_name: str) -> None:
SpackEnvironmentError: if there is nothing to remove
"""
try:
del config_dict(self.pristine_yaml_content)["develop"][pkg_name]
del self.pristine_configuration["develop"][pkg_name]
except KeyError as e:
msg = f"cannot remove '{pkg_name}' from develop specs in {self}, entry does not exist"
raise SpackEnvironmentError(msg) from e
del config_dict(self.yaml_content)["develop"][pkg_name]
del self.configuration["develop"][pkg_name]
self.changed = True

def absolutify_dev_paths(self, init_file_dir: Union[str, pathlib.Path]) -> None:
@@ -2892,11 +2874,11 @@ def absolutify_dev_paths(self, init_file_dir: Union[str, pathlib.Path]) -> None:
init_file_dir: directory with the "spack.yaml" used to initialize the environment.
"""
init_file_dir = pathlib.Path(init_file_dir).absolute()
for _, entry in config_dict(self.pristine_yaml_content).get("develop", {}).items():
for _, entry in self.pristine_configuration.get("develop", {}).items():
expanded_path = os.path.normpath(str(init_file_dir / entry["path"]))
entry["path"] = str(expanded_path)

for _, entry in config_dict(self.yaml_content).get("develop", {}).items():
for _, entry in self.configuration.get("develop", {}).items():
expanded_path = os.path.normpath(str(init_file_dir / entry["path"]))
entry["path"] = str(expanded_path)
self.changed = True
@@ -2910,6 +2892,16 @@ def flush(self) -> None:
_write_yaml(self.pristine_yaml_content, f)
self.changed = False

@property
def pristine_configuration(self):
"""Return the dictionaries in the pristine YAML, without the top level attribute"""
return self.pristine_yaml_content[TOP_LEVEL_KEY]

@property
def configuration(self):
"""Return the dictionaries in the YAML, without the top level attribute"""
return self.yaml_content[TOP_LEVEL_KEY]

def __len__(self):
return len(self.yaml_content)
@@ -42,6 +42,8 @@ def activate_header(env, shell, prompt=None):
cmds += 'set "SPACK_ENV=%s"\n' % env.path
# TODO: despacktivate
# TODO: prompt
elif shell == "pwsh":
cmds += "$Env:SPACK_ENV=%s\n" % env.path
else:
if "color" in os.getenv("TERM", "") and prompt:
prompt = colorize("@G{%s}" % prompt, color=True, enclose=True)
@@ -79,6 +81,8 @@ def deactivate_header(shell):
cmds += 'set "SPACK_ENV="\n'
# TODO: despacktivate
# TODO: prompt
elif shell == "pwsh":
cmds += "Remove-Item Env:SPACK_ENV"
else:
cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n"
cmds += "unset SPACK_ENV; export SPACK_ENV;\n"
@@ -544,6 +544,7 @@ def _static_edges(specs, deptype):
spack.spec.Spec(parent_name),
spack.spec.Spec(dependency_name),
deptypes=deptype,
virtuals=(),
)
@@ -231,7 +231,9 @@ def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs):
dep.concretize()
# mark compiler as depended-on by the packages that use it
for pkg in pkgs:
dep._dependents.add(spack.spec.DependencySpec(pkg.spec, dep, deptypes=("build",)))
dep._dependents.add(
spack.spec.DependencySpec(pkg.spec, dep, deptypes=("build",), virtuals=())
)
packages = [(s.package, False) for s in dep.traverse(order="post", root=False)]

packages.append((dep.package, True))
@@ -40,7 +40,7 @@

import llnl.util.filesystem
import llnl.util.tty as tty
from llnl.util.lang import dedupe
from llnl.util.lang import dedupe, memoized

import spack.build_environment
import spack.config
@@ -671,7 +671,14 @@ def configure_options(self):
# the configure option section
return None

def modification_needs_formatting(self, modification):
"""Returns True if environment modification entry needs to be formatted."""
return (
not isinstance(modification, (spack.util.environment.SetEnv)) or not modification.raw
)

@tengine.context_property
@memoized
def environment_modifications(self):
"""List of environment modifications to be processed."""
# Modifications guessed by inspecting the spec prefix
@@ -733,15 +740,29 @@ def environment_modifications(self):
_check_tokens_are_valid(x.name, message=msg)
# Transform them
x.name = spec.format(x.name, transform=transform)
try:
# Not every command has a value
x.value = spec.format(x.value)
except AttributeError:
pass
if self.modification_needs_formatting(x):
try:
# Not every command has a value
x.value = spec.format(x.value)
except AttributeError:
pass
x.name = str(x.name).replace("-", "_")

return [(type(x).__name__, x) for x in env if x.name not in exclude]

@tengine.context_property
def has_manpath_modifications(self):
"""True if MANPATH environment variable is modified."""
for modification_type, cmd in self.environment_modifications:
if not isinstance(
cmd, (spack.util.environment.PrependPath, spack.util.environment.AppendPath)
):
continue
if cmd.name == "MANPATH":
return True
else:
return False

@tengine.context_property
def autoload(self):
"""List of modules that need to be loaded automatically."""
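The `modification_needs_formatting` hook above skips `spec.format()` for raw `SetEnv` entries, so literal values pass through untouched. A standalone sketch of that predicate with a stand-in `SetEnv` class (the `raw` flag mirrors the diff; this is not the verbatim `spack.util.environment` API):

class SetEnv:
    """Stand-in for spack.util.environment.SetEnv (assumed shape)."""
    def __init__(self, name: str, value: str, raw: bool = False):
        self.name, self.value, self.raw = name, value, raw

def modification_needs_formatting(modification) -> bool:
    # Only raw SetEnv entries bypass spec.format(); everything else is formatted.
    return not (isinstance(modification, SetEnv) and modification.raw)

assert modification_needs_formatting(SetEnv("CC", "{prefix}/bin/cc"))
assert not modification_needs_formatting(SetEnv("FLAGS", "-O2", raw=True))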
@@ -1231,6 +1231,7 @@ def dependencies_of_type(cls, *deptypes):
if any(dt in cls.dependencies[name][cond].type for cond in conds for dt in deptypes)
)

# TODO: allow more than one active extendee.
@property
def extendee_spec(self):
"""
@@ -1246,7 +1247,6 @@ def extendee_spec(self):
if dep.name in self.extendees:
deps.append(dep)

# TODO: allow more than one active extendee.
if deps:
assert len(deps) == 1
return deps[0]
@@ -1256,7 +1256,6 @@ def extendee_spec(self):
if self.spec._concrete:
return None
else:
# TODO: do something sane here with more than one extendee
# If it's not concrete, then return the spec from the
# extends() directive since that is all we know so far.
spec_str, kwargs = next(iter(self.extendees.items()))
@@ -291,7 +291,7 @@ def next_spec(
if root_spec.concrete:
raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))

root_spec._add_dependency(dependency, deptypes=())
root_spec._add_dependency(dependency, deptypes=(), virtuals=())

else:
break
@@ -292,8 +292,8 @@ def from_json(stream, repository):
index.providers = _transform(
providers,
lambda vpkg, plist: (
spack.spec.SpecfileV3.from_node_dict(vpkg),
set(spack.spec.SpecfileV3.from_node_dict(p) for p in plist),
spack.spec.SpecfileV4.from_node_dict(vpkg),
set(spack.spec.SpecfileV4.from_node_dict(p) for p in plist),
),
)
return index
@@ -676,7 +676,7 @@ def is_relocatable(spec):
Raises:
ValueError: if the spec is not installed
"""
if not spec.install_status():
if not spec.installed:
raise ValueError("spec is not installed [{0}]".format(str(spec)))

if spec.external or spec.virtual:
@@ -134,23 +134,6 @@
core_shared_properties = union_dicts(
{
"pipeline-gen": pipeline_gen_schema,
"bootstrap": {
"type": "array",
"items": {
"anyOf": [
{"type": "string"},
{
"type": "object",
"additionalProperties": False,
"required": ["name"],
"properties": {
"name": {"type": "string"},
"compiler-agnostic": {"type": "boolean", "default": False},
},
},
]
},
},
"rebuild-index": {"type": "boolean"},
"broken-specs-url": {"type": "string"},
"broken-tests-packages": {"type": "array", "items": {"type": "string"}},
@@ -209,7 +192,7 @@ def update(data):
# Warn if deprecated section is still in the environment
ci_env = ev.active_environment()
if ci_env:
env_config = ev.config_dict(ci_env.manifest)
env_config = ci_env.manifest[ev.TOP_LEVEL_KEY]
if "gitlab-ci" in env_config:
tty.die("Error: `gitlab-ci` section detected with `ci`, these are not compatible")
@@ -15,8 +15,8 @@
import spack.schema.packages
import spack.schema.projections

#: legal first keys in the schema
keys = ("spack", "env")
#: Top level key in a manifest file
TOP_LEVEL_KEY = "spack"

spec_list_schema = {
"type": "array",
@@ -47,8 +47,8 @@
"title": "Spack environment file schema",
"type": "object",
"additionalProperties": False,
"patternProperties": {
"^env|spack$": {
"properties": {
"spack": {
"type": "object",
"default": {},
"additionalProperties": False,
@@ -614,23 +614,6 @@ def multiple_values_error(self, attribute, pkg):
def no_value_error(self, attribute, pkg):
return f'Cannot select a single "{attribute}" for package "{pkg}"'

def _get_cause_tree(self, cause, conditions, condition_causes, literals, indent=" "):
parents = [c for e, c in condition_causes if e == cause]
local = "required because %s " % conditions[cause]

return [indent + local] + [
c
for parent in parents
for c in self._get_cause_tree(
parent, conditions, condition_causes, literals, indent=indent + " "
)
]

def get_cause_tree(self, cause):
conditions = dict(extract_args(self.model, "condition"))
condition_causes = list(extract_args(self.model, "condition_cause"))
return self._get_cause_tree(cause, conditions, condition_causes, [])

def handle_error(self, msg, *args):
"""Handle an error state derived by the solver."""
if msg == "multiple_values_error":
@@ -639,28 +622,14 @@ def handle_error(self, msg, *args):
if msg == "no_value_error":
return self.no_value_error(*args)

try:
idx = args.index("startcauses")
except ValueError:
msg_args = args
cause_args = []
else:
msg_args = args[:idx]
cause_args = args[idx + 1 :]

msg = msg.format(*msg_args)

# For variant formatting, we sometimes have to construct specs
# to format values properly. Find/replace all occurrences of
# Spec(...) with the string representation of the spec mentioned
msg = msg.format(*args)
specs_to_construct = re.findall(r"Spec\(([^)]*)\)", msg)
for spec_str in specs_to_construct:
msg = msg.replace("Spec(%s)" % spec_str, str(spack.spec.Spec(spec_str)))

for cause in set(cause_args):
for c in self.get_cause_tree(cause):
msg += f"\n{c}"

return msg

def message(self, errors) -> str:
@@ -806,8 +775,6 @@ def visit(node):
self.control.load(os.path.join(parent_dir, "concretize.lp"))
self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
self.control.load(os.path.join(parent_dir, "display.lp"))
if spack.error.debug:
self.control.load(os.path.join(parent_dir, "causation.lp"))
timer.stop("load")

# Grounding is the first step in the solve -- it turns our facts
@@ -868,13 +835,7 @@ def on_model(model):

# print any unknown functions in the model
for sym in best_model:
if sym.name not in (
"attr",
"error",
"opt_criterion",
"condition",
"condition_cause",
):
if sym.name not in ("attr", "error", "opt_criterion"):
tty.debug(
"UNKNOWN SYMBOL: %s(%s)" % (sym.name, ", ".join(stringify(sym.arguments)))
)
@@ -1305,11 +1266,7 @@ def package_provider_rules(self, pkg):
for when in whens:
msg = "%s provides %s when %s" % (pkg.name, provided, when)
condition_id = self.condition(when, provided, pkg.name, msg)
self.gen.fact(
fn.imposed_constraint(
condition_id, "virtual_condition_holds", pkg.name, provided.name
)
)
self.gen.fact(fn.provider_condition(condition_id, when.name, provided.name))
self.gen.newline()

def package_dependencies_rules(self, pkg):
@@ -1330,25 +1287,16 @@ def package_dependencies_rules(self, pkg):
if not deptypes:
continue

msg = "%s depends on %s" % (pkg.name, dep.spec)
msg = "%s depends on %s" % (pkg.name, dep.spec.name)
if cond != spack.spec.Spec():
msg += " when %s" % cond

condition_id = self.condition(cond, dep.spec, pkg.name, msg)
self.gen.fact(fn.condition_requirement(condition_id, "spack_installed", pkg.name))
self.gen.fact(fn.dependency_condition(condition_id, pkg.name, dep.spec.name))

for t in sorted(deptypes):
# there is a declared dependency of type t
self.gen.fact(
fn.imposed_constraint(
condition_id, "dependency_holds", pkg.name, dep.spec.name, t
)
)
self.gen.fact(
fn.imposed_constraint(
condition_id, "virtual_node" if dep.spec.virtual else "node", dep.spec.name
)
)
self.gen.fact(fn.dependency_type(condition_id, t))

self.gen.newline()

@@ -1502,11 +1450,7 @@ def external_packages(self):
for local_idx, spec in enumerate(external_specs):
msg = "%s available as external when satisfying %s" % (spec.name, spec)
condition_id = self.condition(spec, msg=msg)
self.gen.fact(
fn.imposed_constraint(
condition_id, "external_conditions_hold", pkg_name, local_idx
)
)
self.gen.fact(fn.possible_external(condition_id, pkg_name, local_idx))
self.possible_versions[spec.name].add(spec.version)
self.gen.newline()

@@ -2350,29 +2294,16 @@ def setup(self, driver, specs, reuse=None):
self.define_target_constraints()

def literal_specs(self, specs):
for spec in specs:
for idx, spec in enumerate(specs):
self.gen.h2("Spec: %s" % str(spec))
self.gen.fact(fn.literal(idx))

# cannot use self.condition because it requires condition requirements
condition_id = next(self._condition_id_counter)
self.gen.fact(fn.condition(condition_id, "%s is provided as input spec" % spec))
self.gen.fact(fn.literal(condition_id))

self.gen.fact(fn.condition_requirement(condition_id, "literal_solved", condition_id))

self.gen.fact(
fn.imposed_constraint(
condition_id, "virtual_root" if spec.virtual else "root", spec.name
)
)

self.gen.fact(fn.literal(idx, "virtual_root" if spec.virtual else "root", spec.name))
for clause in self.spec_clauses(spec):
self.gen.fact(fn.imposed_constraint(condition_id, *clause.args))
self.gen.fact(fn.literal(idx, *clause.args))
if clause.args[0] == "variant_set":
self.gen.fact(
fn.imposed_constraint(
condition_id, "variant_default_value_from_cli", *clause.args[1:]
)
fn.literal(idx, "variant_default_value_from_cli", *clause.args[1:])
)

if self.concretize_everything:
@@ -2466,8 +2397,6 @@ class SpecBuilder(object):
r"^root$",
r"^virtual_node$",
r"^virtual_root$",
r"^.*holds?$",
r"^literal.*$",
]
)
)
@@ -2571,10 +2500,15 @@ def depends_on(self, pkg, dep, type):
assert len(dependencies) < 2, msg

if not dependencies:
self._specs[pkg].add_dependency_edge(self._specs[dep], deptypes=(type,))
self._specs[pkg].add_dependency_edge(self._specs[dep], deptypes=(type,), virtuals=())
else:
# TODO: This assumes that each solve unifies dependencies
dependencies[0].add_type(type)
dependencies[0].update_deptypes(deptypes=(type,))

def virtual_on_edge(self, pkg, provider, virtual):
dependencies = self._specs[pkg].edges_to_dependencies(name=provider)
assert len(dependencies) == 1
dependencies[0].update_virtuals((virtual,))

def reorder_flags(self):
"""Order compiler flags on specs in predefined order.
@@ -2652,6 +2586,8 @@ def sort_fn(function_tuple):
return (-2, 0)
elif name == "external_spec_selected":
return (0, 0)  # note out of order so this goes last
elif name == "virtual_on_edge":
return (1, 0)
else:
return (-1, 0)
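The `literal_specs` rewrite above replaces each input spec's synthetic condition with plain `literal(idx, ...)` facts, which concretize.lp maps back to `attr(...)` once the literal is solved. A hedged sketch of the new emission loop (the `gen`/`fn` call shapes follow the diff; the rest is simplified):

def literal_specs(gen, fn, specs, spec_clauses):
    for idx, spec in enumerate(specs):
        gen.fact(fn.literal(idx))  # declare the literal itself
        gen.fact(fn.literal(idx, "virtual_root" if spec.virtual else "root", spec.name))
        for clause in spec_clauses(spec):
            gen.fact(fn.literal(idx, *clause.args))  # one fact per clause, keyed by idx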
@@ -1,72 +0,0 @@
% Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

% associated conditions by cause -> effect
condition_cause(Effect, Cause) :-
condition_holds(Effect), condition_holds(Cause),
attr(Name, A1),
condition_requirement(Effect, Name, A1),
imposed_constraint(Cause, Name, A1).
condition_cause(Effect, Cause) :-
condition_holds(Effect), condition_holds(Cause),
attr(Name, A1, A2),
condition_requirement(Effect, Name, A1, A2),
imposed_constraint(Cause, Name, A1, A2).
condition_cause(Effect, Cause) :-
condition_holds(Effect), condition_holds(Cause),
attr(Name, A1, A2, A3),
condition_requirement(Effect, Name, A1, A2, A3),
imposed_constraint(Cause, Name, A1, A2, A3).
condition_cause(Effect, Cause) :-
condition_holds(Effect), condition_holds(Cause),
attr(Name, A1, A2, A3, A4),
condition_requirement(Effect, Name, A1, A2, A3, A4),
imposed_constraint(Cause, Name, A1, A2, A3, A4).

% At most one variant for single valued variants
error(0, "'{0}' required multiple values for single-valued variant '{1}'\n Requested 'Spec({1}={2})' and 'Spec({1}={3})'", Package, Variant, Value1, Value2, startcauses, Cause1, Cause2)
:- attr("node", Package),
variant(Package, Variant),
variant_single_value(Package, Variant),
build(Package),
attr("variant_value", Package, Variant, Value1),
imposed_constraint(Cause1, "variant_set", Package, Variant, Value1),
condition_holds(Cause1),
attr("variant_value", Package, Variant, Value2),
imposed_constraint(Cause2, "variant_set", Package, Variant, Value2),
condition_holds(Cause2),
Value1 < Value2. % see[1] in concretize.lp

% We cannot have a version that violates another version constraint
error(0, "Version '{0}' of {1} does not satisfy '@{2}'", Version, Package, Constraint, startcauses, VersionCause, ConstraintCause)
:- attr("node", Package),
attr("version", Package, Version),
imposed_constraint(VersionCause, "node_version_satisfies", Package, Version),
condition_holds(VersionCause),
attr("node_version_satisfies", Package, Constraint),
imposed_constraint(ConstraintCause, "node_version_satisfies", Package, Constraint),
condition_holds(ConstraintCause),
not version_satisfies(Package, Constraint, Version).

% A virtual package may or may not have a version, but never has more than one
% Error to catch how it happens
error(0, "Version '{0}' of {1} does not satisfy '@{2}'", Version, Virtual, Constraint, startcauses, VersionCause, ConstraintCause)
:- attr("virtual_node", Virtual),
attr("version", Virtual, Version),
imposed_constraint(VersionCause, "node_version_satisfies", Virtual, Version),
condition_holds(VersionCause),
attr("node_version_satisfies", Virtual, Constraint),
imposed_constraint(ConstraintCause, "node_version_satisfies", Virtual, Constraint),
condition_holds(ConstraintCause),
not version_satisfies(Virtual, Constraint, Version).

% More specific error message if the version cannot satisfy some constraint
% Otherwise covered by `no_version_error` and `versions_conflict_error`.
error(0, "Cannot satisfy '{0}@{1}'", Package, Constraint, startcauses, ConstraintCause)
:- attr("node_version_satisfies", Package, Constraint),
imposed_constraint(ConstraintCause, "node_version_satisfies", Package, Constraint),
condition_holds(ConstraintCause),
attr("version", Package, Version),
not version_satisfies(Package, Constraint, Version).
@@ -12,8 +12,8 @@
%-----------------------------------------------------------------------------

% Give clingo the choice to solve an input spec or not
{ attr("literal_solved", ID) } :- literal(ID).
literal_not_solved(ID) :- not attr("literal_solved", ID), literal(ID).
{ literal_solved(ID) } :- literal(ID).
literal_not_solved(ID) :- not literal_solved(ID), literal(ID).

% If concretize_everything() is a fact, then we cannot have unsolved specs
:- literal_not_solved(ID), concretize_everything.
@@ -21,14 +21,24 @@ literal_not_solved(ID) :- not attr("literal_solved", ID), literal(ID).
% Make a problem with "zero literals solved" unsat. This is to trigger
% looking for solutions to the ASP problem with "errors", which results
% in better reporting for users. See #30669 for details.
1 { attr("literal_solved", ID) : literal(ID) }.
1 { literal_solved(ID) : literal(ID) }.

opt_criterion(300, "number of input specs not concretized").
#minimize{ 0@300: #true }.
#minimize { 1@300,ID : literal_not_solved(ID) }.

% Map constraint on the literal ID to the correct PSID
attr(Name, A1) :- literal(LiteralID, Name, A1), literal_solved(LiteralID).
attr(Name, A1, A2) :- literal(LiteralID, Name, A1, A2), literal_solved(LiteralID).
attr(Name, A1, A2, A3) :- literal(LiteralID, Name, A1, A2, A3), literal_solved(LiteralID).
attr(Name, A1, A2, A3, A4) :- literal(LiteralID, Name, A1, A2, A3, A4), literal_solved(LiteralID).

#defined concretize_everything/0.
#defined literal/1.
#defined literal/3.
#defined literal/4.
#defined literal/5.
#defined literal/6.

% Attributes for node packages which must have a single value
attr_single_value("version").
@@ -48,13 +58,6 @@ error(100, multiple_values_error, Attribute, Package)
attr_single_value(Attribute),
2 { attr(Attribute, Package, Version) }.

%-----------------------------------------------------------------------------
% Define functions for error handling
%-----------------------------------------------------------------------------

#defined error/9.
#defined condition_cause/2.

%-----------------------------------------------------------------------------
% Version semantics
%-----------------------------------------------------------------------------
@@ -93,18 +96,7 @@ version_satisfies(Package, Constraint, HashVersion) :- version_satisfies(Package
{ attr("version", Package, Version) : version_declared(Package, Version) }
:- attr("node", Package).

% Error to ensure structure of the program is not violated
error(2, "No version from '{0}' satisfies '@{1}' and '@{2}'", Package, Version1, Version2)
:- attr("node", Package),
attr("version", Package, Version1),
attr("version", Package, Version2),
Version1 < Version2. % see[1]

error(2, "No versions available for package '{0}'", Package)
:- attr("node", Package), not attr("version", Package, _).

% A virtual package may or may not have a version, but never has more than one
% fallback error for structure in case there's another way for it to happen
error(100, "Cannot select a single version for virtual '{0}'", Virtual)
:- attr("virtual_node", Virtual),
2 { attr("version", Virtual, Version) }.
@@ -158,7 +150,8 @@ possible_version_weight(Package, Weight)
:- attr("node_version_satisfies", Package, Constraint),
version_satisfies(Package, Constraint, _).

% Error for structure of program
% More specific error message if the version cannot satisfy some constraint
% Otherwise covered by `no_version_error` and `versions_conflict_error`.
error(10, "Cannot satisfy '{0}@{1}'", Package, Constraint)
:- attr("node_version_satisfies", Package, Constraint),
attr("version", Package, Version),
@@ -189,8 +182,9 @@ condition_holds(ID) :-
attr(Name, A1, A2, A3) : condition_requirement(ID, Name, A1, A2, A3);
attr(Name, A1, A2, A3, A4) : condition_requirement(ID, Name, A1, A2, A3, A4).

% condition_holds(ID) implies all imposed_constraints.
impose(ID) :- condition_holds(ID).
% condition_holds(ID) implies all imposed_constraints, unless do_not_impose(ID)
% is derived. This allows imposed constraints to be canceled in special cases.
impose(ID) :- condition_holds(ID), not do_not_impose(ID).

% conditions that hold impose constraints on other specs
attr(Name, A1) :- impose(ID), imposed_constraint(ID, Name, A1).
@@ -235,19 +229,33 @@ depends_on(Package, Dependency) :- attr("depends_on", Package, Dependency, _).
% a dependency holds if its condition holds and if it is not external or
% concrete. We chop off dependencies for externals, and dependencies of
% concrete specs don't need to be resolved -- they arise from the concrete
% specs themselves. This attr is used in constraints from dependency conditions
attr("spack_installed", Package) :- build(Package), not external(Package).
% specs themselves.
dependency_holds(Package, Dependency, Type) :-
dependency_condition(ID, Package, Dependency),
dependency_type(ID, Type),
build(Package),
not external(Package),
condition_holds(ID).

% We cut off dependencies of externals (as we don't really know them).
% Don't impose constraints on dependencies that don't exist.
do_not_impose(ID) :-
not dependency_holds(Package, Dependency, _),
dependency_condition(ID, Package, Dependency).

% declared dependencies are real if they're not virtual AND
% the package is not an external.
% They're only triggered if the associated dependency condition holds.
attr("depends_on", Package, Dependency, Type)
:- attr("dependency_holds", Package, Dependency, Type),
:- dependency_holds(Package, Dependency, Type),
not virtual(Dependency).

% every root must be a node
attr("node", Package) :- attr("root", Package).

% dependencies imply new nodes
attr("node", Dependency) :- attr("node", Package), depends_on(Package, Dependency).

% all nodes in the graph must be reachable from some root
% this ensures a user can't say `zlib ^libiconv` (neither of which have any
% dependencies) and get a two-node unconnected graph
@@ -288,18 +296,20 @@ error(1, Msg) :- attr("node", Package),
% if a package depends on a virtual, it's not external and we have a
% provider for that virtual then it depends on the provider
attr("depends_on", Package, Provider, Type)
:- attr("dependency_holds", Package, Virtual, Type),
:- dependency_holds(Package, Virtual, Type),
provider(Provider, Virtual),
not external(Package).

% If a package depends on a provider, the provider must be a node
% nodes that are not indirected by a virtual are instantiated
% directly from the imposed constraints of the dependency condition
attr("node", Provider)
:- attr("dependency_holds", Package, Virtual, Type),
attr("virtual_on_edge", Package, Provider, Virtual)
:- dependency_holds(Package, Virtual, Type),
provider(Provider, Virtual),
not external(Package).

% dependencies on virtuals also imply that the virtual is a virtual node
attr("virtual_node", Virtual)
:- dependency_holds(Package, Virtual, Type),
virtual(Virtual), not external(Package).

% If there's a virtual node, we must select one and only one provider.
% The provider must be selected among the possible providers.
{ provider(Package, Virtual) : possible_provider(Package, Virtual) }
@@ -325,11 +335,17 @@ attr("root", Package) :- attr("virtual_root", Virtual), provider(Package, Virtua
% for environments that are concretized together (e.g. where we
% ask to install "mpich" and "hdf5+mpi" and we want "mpich" to
% be the mpi provider)
provider(Package, Virtual) :- attr("node", Package), attr("virtual_condition_holds", Package, Virtual).
provider(Package, Virtual) :- attr("node", Package), virtual_condition_holds(Package, Virtual).

% The provider provides the virtual if some provider condition holds.
virtual_condition_holds(Provider, Virtual) :-
provider_condition(ID, Provider, Virtual),
condition_holds(ID),
virtual(Virtual).

% A package cannot be the actual provider for a virtual if it does not
% fulfill the conditions to provide that virtual
:- provider(Package, Virtual), not attr("virtual_condition_holds", Package, Virtual),
:- provider(Package, Virtual), not virtual_condition_holds(Package, Virtual),
internal_error("Virtual when provides not respected").

#defined possible_provider/2.
@@ -371,8 +387,14 @@ possible_provider_weight(Dependency, Virtual, 100, "fallback") :- provider(Depen

% do not warn if generated program contains none of these.
#defined possible_provider/2.
#defined provider_condition/3.
#defined required_provider_condition/3.
#defined required_provider_condition/4.
#defined required_provider_condition/5.
#defined required_provider_condition/6.
#defined declared_dependency/3.
#defined virtual/1.
#defined virtual_condition_holds/2.
#defined external/1.
#defined external_spec/2.
#defined external_version_declared/4.
@@ -420,15 +442,25 @@ external(Package) :- attr("external_spec_selected", Package, _).

% determine if an external spec has been selected
attr("external_spec_selected", Package, LocalIndex) :-
attr("external_conditions_hold", Package, LocalIndex),
external_conditions_hold(Package, LocalIndex),
attr("node", Package),
not attr("hash", Package, _).

external_conditions_hold(Package, LocalIndex) :-
possible_external(ID, Package, LocalIndex), condition_holds(ID).

% it cannot happen that a spec is external, but none of the external specs
% conditions hold.
error(100, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
:- external(Package),
not attr("external_conditions_hold", Package, _).
not external_conditions_hold(Package, _).

#defined possible_external/3.
#defined external_spec_index/3.
#defined external_spec_condition/3.
#defined external_spec_condition/4.
#defined external_spec_condition/5.
#defined external_spec_condition/6.

%-----------------------------------------------------------------------------
% Config required semantics
@@ -567,6 +599,7 @@ attr("variant_value", Package, Variant, Value) :-
variant(Package, Variant),
build(Package).


error(100, "'{0}' required multiple values for single-valued variant '{1}'", Package, Variant)
:- attr("node", Package),
variant(Package, Variant),
@@ -637,7 +670,7 @@ variant_default_not_used(Package, Variant, Value)
external_with_variant_set(Package, Variant, Value)
:- attr("variant_value", Package, Variant, Value),
condition_requirement(ID, "variant_value", Package, Variant, Value),
imposed_constraint(ID, "external_conditions_hold", Package, _),
possible_external(ID, Package, _),
external(Package),
attr("node", Package).
@@ -23,12 +23,5 @@
#show error/4.
#show error/5.
#show error/6.
#show error/7.
#show error/8.
#show error/9.

% show cause -> effect data for errors
#show condition_cause/2.
#show condition/2.

% debug
@@ -50,6 +50,7 @@
"""
import collections
import collections.abc
import enum
import io
import itertools
import os
@@ -170,7 +171,17 @@
)

#: specfile format version. Must increase monotonically
SPECFILE_FORMAT_VERSION = 3
SPECFILE_FORMAT_VERSION = 4


# InstallStatus is used to map install statuses to symbols for display
# Options are artificially disjoint for display purposes
class InstallStatus(enum.Enum):
installed = "@g{[+]} "
upstream = "@g{[^]} "
external = "@g{[e]} "
absent = "@K{ - } "
missing = "@r{[-]} "


def colorize_spec(spec):
@@ -714,47 +725,81 @@ class DependencySpec:
parent: starting node of the edge
spec: ending node of the edge.
deptypes: list of strings, representing dependency relationships.
virtuals: virtual packages provided from child to parent node.
"""

__slots__ = "parent", "spec", "deptypes"
__slots__ = "parent", "spec", "parameters"

def __init__(self, parent: "Spec", spec: "Spec", *, deptypes: dp.DependencyArgument):
def __init__(
self,
parent: "Spec",
spec: "Spec",
*,
deptypes: dp.DependencyArgument,
virtuals: Tuple[str, ...],
):
self.parent = parent
self.spec = spec
self.deptypes = dp.canonical_deptype(deptypes)
self.parameters = {
"deptypes": dp.canonical_deptype(deptypes),
"virtuals": tuple(sorted(set(virtuals))),
}

def update_deptypes(self, deptypes: dp.DependencyArgument) -> bool:
deptypes = set(deptypes)
deptypes.update(self.deptypes)
deptypes = tuple(sorted(deptypes))
changed = self.deptypes != deptypes
@property
def deptypes(self) -> Tuple[str, ...]:
return self.parameters["deptypes"]

self.deptypes = deptypes
return changed
@property
def virtuals(self) -> Tuple[str, ...]:
return self.parameters["virtuals"]

def _update_edge_multivalued_property(
self, property_name: str, value: Tuple[str, ...]
) -> bool:
current = self.parameters[property_name]
update = set(current) | set(value)
update = tuple(sorted(update))
changed = current != update

if not changed:
return False

self.parameters[property_name] = update
return True

def update_deptypes(self, deptypes: Tuple[str, ...]) -> bool:
"""Update the current dependency types"""
return self._update_edge_multivalued_property("deptypes", deptypes)

def update_virtuals(self, virtuals: Tuple[str, ...]) -> bool:
"""Update the list of provided virtuals"""
return self._update_edge_multivalued_property("virtuals", virtuals)

def copy(self) -> "DependencySpec":
return DependencySpec(self.parent, self.spec, deptypes=self.deptypes)

def add_type(self, type: dp.DependencyArgument):
self.deptypes = dp.canonical_deptype(self.deptypes + dp.canonical_deptype(type))
"""Return a copy of this edge"""
return DependencySpec(
self.parent, self.spec, deptypes=self.deptypes, virtuals=self.virtuals
)

def _cmp_iter(self):
yield self.parent.name if self.parent else None
yield self.spec.name if self.spec else None
yield self.deptypes
yield self.virtuals

def __str__(self) -> str:
return "%s %s--> %s" % (
self.parent.name if self.parent else None,
self.deptypes,
self.spec.name if self.spec else None,
)
parent = self.parent.name if self.parent else None
child = self.spec.name if self.spec else None
return f"{parent} {self.deptypes}[virtuals={','.join(self.virtuals)}] --> {child}"

def canonical(self) -> Tuple[str, str, Tuple[str, ...]]:
return self.parent.dag_hash(), self.spec.dag_hash(), self.deptypes
def canonical(self) -> Tuple[str, str, Tuple[str, ...], Tuple[str, ...]]:
return self.parent.dag_hash(), self.spec.dag_hash(), self.deptypes, self.virtuals

def flip(self) -> "DependencySpec":
return DependencySpec(parent=self.spec, spec=self.parent, deptypes=self.deptypes)
"""Flip the dependency, and drop virtual information"""
return DependencySpec(
parent=self.spec, spec=self.parent, deptypes=self.deptypes, virtuals=()
)
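A minimal, self-contained sketch of the reworked edge above: `deptypes` and `virtuals` now live in one `parameters` dict and share a single multivalued-update path (toy class under stated assumptions, not the real `DependencySpec`):

from typing import Tuple

class ToyEdge:
    def __init__(self, deptypes: Tuple[str, ...], virtuals: Tuple[str, ...]):
        self.parameters = {
            "deptypes": tuple(sorted(set(deptypes))),
            "virtuals": tuple(sorted(set(virtuals))),
        }

    def _update(self, key: str, value: Tuple[str, ...]) -> bool:
        # Union the new values in; report whether anything changed.
        update = tuple(sorted(set(self.parameters[key]) | set(value)))
        changed = update != self.parameters[key]
        if changed:
            self.parameters[key] = update
        return changed

edge = ToyEdge(deptypes=("build",), virtuals=())
assert edge._update("virtuals", ("mpi",))       # first update changes the edge
assert not edge._update("virtuals", ("mpi",))   # repeating it is a no-op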
class CompilerFlag(str):
@@ -1575,10 +1620,12 @@ def _set_compiler(self, compiler):
)
self.compiler = compiler

def _add_dependency(self, spec: "Spec", *, deptypes: dp.DependencyArgument):
def _add_dependency(
self, spec: "Spec", *, deptypes: dp.DependencyArgument, virtuals: Tuple[str, ...]
):
"""Called by the parser to add another spec as a dependency."""
if spec.name not in self._dependencies or not spec.name:
self.add_dependency_edge(spec, deptypes=deptypes)
self.add_dependency_edge(spec, deptypes=deptypes, virtuals=virtuals)
return

# Keep the intersection of constraints when a dependency is added
@@ -1596,34 +1643,58 @@ def _add_dependency(self, spec: "Spec", *, deptypes: dp.DependencyArgument):
"Cannot depend on incompatible specs '%s' and '%s'" % (dspec.spec, spec)
)

def add_dependency_edge(self, dependency_spec: "Spec", *, deptypes: dp.DependencyArgument):
def add_dependency_edge(
self,
dependency_spec: "Spec",
*,
deptypes: dp.DependencyArgument,
virtuals: Tuple[str, ...],
):
"""Add a dependency edge to this spec.

Args:
dependency_spec: spec of the dependency
deptypes: dependency types for this edge
virtuals: virtuals provided by this edge
"""
deptypes = dp.canonical_deptype(deptypes)

# Check if we need to update edges that are already present
selected = self._dependencies.select(child=dependency_spec.name)
for edge in selected:
has_errors, details = False, []
msg = f"cannot update the edge from {edge.parent.name} to {edge.spec.name}"
if any(d in edge.deptypes for d in deptypes):
msg = (
'cannot add a dependency on "{0.spec}" of {1} type '
'when the "{0.parent}" has the edge {0!s} already'
has_errors = True
details.append(
(
f"{edge.parent.name} has already an edge matching any"
f" of these types {str(deptypes)}"
)
)
raise spack.error.SpecError(msg.format(edge, deptypes))

if any(v in edge.virtuals for v in virtuals):
has_errors = True
details.append(
(
f"{edge.parent.name} has already an edge matching any"
f" of these virtuals {str(virtuals)}"
)
)

if has_errors:
raise spack.error.SpecError(msg, "\n".join(details))

for edge in selected:
if id(dependency_spec) == id(edge.spec):
# If we are here, it means the edge object was previously added to
# both the parent and the child. When we update this object they'll
# both see the deptype modification.
edge.add_type(deptypes)
edge.update_deptypes(deptypes=deptypes)
edge.update_virtuals(virtuals=virtuals)
return

edge = DependencySpec(self, dependency_spec, deptypes=deptypes)
edge = DependencySpec(self, dependency_spec, deptypes=deptypes, virtuals=virtuals)
self._dependencies.add(edge)
dependency_spec._dependents.add(edge)
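Usage-wise, the new `add_dependency_edge` collects every conflict before raising, so a single `SpecError` can report overlapping deptypes and overlapping virtuals together. A hedged sketch (`parent` and `child` are hypothetical Spec objects, not from the diff):

parent.add_dependency_edge(child, deptypes=("link",), virtuals=("mpi",))
# A second edge overlapping on either field now fails with aggregated details:
try:
    parent.add_dependency_edge(child, deptypes=("link",), virtuals=())
except spack.error.SpecError as e:
    print(e)  # "cannot update the edge from ... to ..." plus per-field details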
@@ -1896,12 +1967,12 @@ def lookup_hash(self):
for node in self.traverse(root=False):
if node.abstract_hash:
new = node._lookup_hash()
spec._add_dependency(new, deptypes=())
spec._add_dependency(new, deptypes=(), virtuals=())

# reattach nodes that were not otherwise satisfied by new dependencies
for node in self.traverse(root=False):
if not any(n._satisfies(node) for n in spec.traverse()):
spec._add_dependency(node.copy(), deptypes=())
spec._add_dependency(node.copy(), deptypes=(), virtuals=())

return spec

@@ -2036,8 +2107,14 @@ def to_node_dict(self, hash=ht.dag_hash):
name_tuple = ("name", name)
for dspec in edges_for_name:
hash_tuple = (hash.name, dspec.spec._cached_hash(hash))
type_tuple = ("type", sorted(str(s) for s in dspec.deptypes))
deps_list.append(syaml.syaml_dict([name_tuple, hash_tuple, type_tuple]))
parameters_tuple = (
"parameters",
syaml.syaml_dict(
(key, dspec.parameters[key]) for key in sorted(dspec.parameters)
),
)
ordered_entries = [name_tuple, hash_tuple, parameters_tuple]
deps_list.append(syaml.syaml_dict(ordered_entries))
d["dependencies"] = deps_list

# Name is included in case this is replacing a virtual.
@@ -2361,7 +2438,7 @@ def spec_and_dependency_types(s):
dag_node, dependency_types = spec_and_dependency_types(s)

dependency_spec = spec_builder({dag_node: s_dependencies})
spec._add_dependency(dependency_spec, deptypes=dependency_types)
spec._add_dependency(dependency_spec, deptypes=dependency_types, virtuals=())

return spec

@@ -2379,8 +2456,10 @@ def from_dict(data):
spec = SpecfileV1.load(data)
elif int(data["spec"]["_meta"]["version"]) == 2:
spec = SpecfileV2.load(data)
else:
elif int(data["spec"]["_meta"]["version"]) == 3:
spec = SpecfileV3.load(data)
else:
spec = SpecfileV4.load(data)

# Any git version should
for s in spec.traverse():
@@ -2529,6 +2608,7 @@ def _concretize_helper(self, concretizer, presets=None, visited=None):
def _replace_with(self, concrete):
"""Replace this virtual spec with a concrete spec."""
assert self.virtual
virtuals = (self.name,)
for dep_spec in itertools.chain.from_iterable(self._dependents.values()):
dependent = dep_spec.parent
deptypes = dep_spec.deptypes
@@ -2539,7 +2619,11 @@ def _replace_with(self, concrete):

# add the replacement, unless it is already a dep of dependent.
if concrete.name not in dependent._dependencies:
dependent._add_dependency(concrete, deptypes=deptypes)
dependent._add_dependency(concrete, deptypes=deptypes, virtuals=virtuals)
else:
dependent.edges_to_dependencies(name=concrete.name)[0].update_virtuals(
virtuals=virtuals
)

def _expand_virtual_packages(self, concretizer):
"""Find virtual packages in this spec, replace them with providers,
@@ -3180,7 +3264,9 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test

# If it's a virtual dependency, try to find an existing
# provider in the spec, and merge that.
virtuals = ()
if spack.repo.path.is_virtual_safe(dep.name):
virtuals = (dep.name,)
visited.add(dep.name)
provider = self._find_provider(dep, provider_index)
if provider:
@@ -3236,7 +3322,7 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
# Add merged spec to my deps and recurse
spec_dependency = spec_deps[dep.name]
if dep.name not in self._dependencies:
self._add_dependency(spec_dependency, deptypes=dependency.type)
self._add_dependency(spec_dependency, deptypes=dependency.type, virtuals=virtuals)

changed |= spec_dependency._normalize_helper(visited, spec_deps, provider_index, tests)
return changed
@@ -3573,15 +3659,20 @@ def _constrain_dependencies(self, other):
changed |= edges_from_name[0].update_deptypes(
other._dependencies[name][0].deptypes
)
changed |= edges_from_name[0].update_virtuals(
other._dependencies[name][0].virtuals
)

# Update with additional constraints from other spec
# operate on direct dependencies only, because a concrete dep
# represented by hash may have structure that needs to be preserved
for name in other.direct_dep_difference(self):
dep_spec_copy = other._get_dependency(name)
dep_copy = dep_spec_copy.spec
deptypes = dep_spec_copy.deptypes
self._add_dependency(dep_copy.copy(), deptypes=deptypes)
self._add_dependency(
dep_spec_copy.spec.copy(),
deptypes=dep_spec_copy.deptypes,
virtuals=dep_spec_copy.virtuals,
)
changed = True

return changed
@@ -3965,7 +4056,7 @@ def spid(spec):
new_specs[spid(edge.spec)] = edge.spec.copy(deps=False)

new_specs[spid(edge.parent)].add_dependency_edge(
new_specs[spid(edge.spec)], deptypes=edge.deptypes
new_specs[spid(edge.spec)], deptypes=edge.deptypes, virtuals=edge.virtuals
)

def copy(self, deps=True, **kwargs):
@@ -4401,12 +4492,20 @@ def __str__(self):
def install_status(self):
"""Helper for tree to print DB install status."""
if not self.concrete:
return None
try:
record = spack.store.db.get_record(self)
return record.installed
except KeyError:
return None
return InstallStatus.absent

if self.external:
return InstallStatus.external

upstream, record = spack.store.db.query_by_spec_hash(self.dag_hash())
if not record:
return InstallStatus.absent
elif upstream and record.installed:
return InstallStatus.upstream
elif record.installed:
return InstallStatus.installed
else:
return InstallStatus.missing

def _installed_explicitly(self):
"""Helper for tree to print DB install status."""
@@ -4420,7 +4519,10 @@ def _installed_explicitly(self):

def tree(self, **kwargs):
"""Prints out this spec and its dependencies, tree-formatted
with indentation."""
with indentation.

Status function may either output a boolean or an InstallStatus
"""
color = kwargs.pop("color", clr.get_color_when())
depth = kwargs.pop("depth", False)
hashes = kwargs.pop("hashes", False)
@@ -4452,14 +4554,12 @@ def tree(self, **kwargs):

if status_fn:
status = status_fn(node)
if node.installed_upstream:
out += clr.colorize("@g{[^]} ", color=color)
elif status is None:
out += clr.colorize("@K{ - } ", color=color)  # !installed
if status in list(InstallStatus):
out += clr.colorize(status.value, color=color)
elif status:
out += clr.colorize("@g{[+]} ", color=color)  # installed
out += clr.colorize("@g{[+]} ", color=color)
else:
out += clr.colorize("@r{[-]} ", color=color)  # missing
out += clr.colorize("@r{[-]} ", color=color)

if hashes:
out += clr.colorize("@K{%s} ", color=color) % node.dag_hash(hlen)
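With `install_status` now returning an `InstallStatus` member, a tree status function can hand the enum back directly and `tree()` prints its `.value` symbol; plain booleans still fall through to the `[+]`/`[-]` branches. A hedged usage sketch (the keyword name follows the surrounding code, not a documented API):

# InstallStatus members carry their display symbol in .value
print(spec.tree(status_fn=lambda node: node.install_status()))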
@@ -4635,12 +4735,16 @@ def from_self(name, transitive):
if name in self_nodes:
for edge in self[name].edges_to_dependencies():
dep_name = deps_to_replace.get(edge.spec, edge.spec).name
nodes[name].add_dependency_edge(nodes[dep_name], deptypes=edge.deptypes)
nodes[name].add_dependency_edge(
nodes[dep_name], deptypes=edge.deptypes, virtuals=edge.virtuals
)
if any(dep not in self_nodes for dep in self[name]._dependencies):
nodes[name].build_spec = self[name].build_spec
else:
for edge in other[name].edges_to_dependencies():
nodes[name].add_dependency_edge(nodes[edge.spec.name], deptypes=edge.deptypes)
nodes[name].add_dependency_edge(
nodes[edge.spec.name], deptypes=edge.deptypes, virtuals=edge.virtuals
)
if any(dep not in other_nodes for dep in other[name]._dependencies):
nodes[name].build_spec = other[name].build_spec

@@ -4730,11 +4834,40 @@ def merge_abstract_anonymous_specs(*abstract_specs: Spec):
# Update with additional constraints from other spec
for name in current_spec_constraint.direct_dep_difference(merged_spec):
edge = next(iter(current_spec_constraint.edges_to_dependencies(name)))
merged_spec._add_dependency(edge.spec.copy(), deptypes=edge.deptypes)
merged_spec._add_dependency(
edge.spec.copy(), deptypes=edge.deptypes, virtuals=edge.virtuals
)

return merged_spec


def reconstruct_virtuals_on_edges(spec):
"""Reconstruct virtuals on edges. Used to read from old DB and reindex.

Args:
spec: spec on which we want to reconstruct virtuals
"""
# Collect all possible virtuals
possible_virtuals = set()
for node in spec.traverse():
try:
possible_virtuals.update({x for x in node.package.dependencies if Spec(x).virtual})
except Exception as e:
warnings.warn(f"cannot reconstruct virtual dependencies on package {node.name}: {e}")
continue

# Assume all incoming edges to provider are marked with virtuals=
for vspec in possible_virtuals:
try:
provider = spec[vspec]
except KeyError:
# Virtual not in the DAG
continue

for edge in provider.edges_from_dependents():
edge.update_virtuals([vspec])
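A short usage note: specfiles older than v4 carry no edge virtuals, so the readers for those formats call `reconstruct_virtuals_on_edges` after loading to re-mark provider edges. Hedged sketch ("mpich" is illustrative):

spec = SpecfileV2.load(data)  # load() already reconstructs virtuals (see below)
for edge in spec["mpich"].edges_from_dependents():
    print(edge.virtuals)  # e.g. ("mpi",) once the provider edges are re-marked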
class SpecfileReaderBase:
|
||||
@classmethod
|
||||
def from_node_dict(cls, node):
|
||||
@@ -4818,7 +4951,7 @@ def _load(cls, data):
|
||||
|
||||
# Pass 0: Determine hash type
|
||||
for node in nodes:
|
||||
for _, _, _, dhash_type in cls.dependencies_from_node_dict(node):
|
||||
for _, _, _, dhash_type, _ in cls.dependencies_from_node_dict(node):
|
||||
any_deps = True
|
||||
if dhash_type:
|
||||
hash_type = dhash_type
|
||||
@@ -4849,8 +4982,10 @@ def _load(cls, data):
|
||||
# Pass 2: Finish construction of all DAG edges (including build specs)
|
||||
for node_hash, node in hash_dict.items():
|
||||
node_spec = node["node_spec"]
|
||||
for _, dhash, dtypes, _ in cls.dependencies_from_node_dict(node):
|
||||
node_spec._add_dependency(hash_dict[dhash]["node_spec"], deptypes=dtypes)
|
||||
for _, dhash, dtypes, _, virtuals in cls.dependencies_from_node_dict(node):
|
||||
node_spec._add_dependency(
|
||||
hash_dict[dhash]["node_spec"], deptypes=dtypes, virtuals=virtuals
|
||||
)
|
||||
if "build_spec" in node.keys():
|
||||
_, bhash, _ = cls.build_spec_from_node_dict(node, hash_type=hash_type)
|
||||
node_spec._build_spec = hash_dict[bhash]["node_spec"]
|
||||
@@ -4884,9 +5019,10 @@ def load(cls, data):
|
||||
for node in nodes:
|
||||
# get dependency dict from the node.
|
||||
name, data = cls.name_and_data(node)
|
||||
for dname, _, dtypes, _ in cls.dependencies_from_node_dict(data):
|
||||
deps[name]._add_dependency(deps[dname], deptypes=dtypes)
|
||||
for dname, _, dtypes, _, virtuals in cls.dependencies_from_node_dict(data):
|
||||
deps[name]._add_dependency(deps[dname], deptypes=dtypes, virtuals=virtuals)
|
||||
|
||||
reconstruct_virtuals_on_edges(result)
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
@@ -4915,18 +5051,20 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
|
||||
if h.name in elt:
|
||||
dep_hash, deptypes = elt[h.name], elt["type"]
|
||||
hash_type = h.name
|
||||
virtuals = []
|
||||
break
|
||||
else: # We never determined a hash type...
|
||||
raise spack.error.SpecError("Couldn't parse dependency spec.")
|
||||
else:
|
||||
raise spack.error.SpecError("Couldn't parse dependency types in spec.")
|
||||
yield dep_name, dep_hash, list(deptypes), hash_type
|
||||
yield dep_name, dep_hash, list(deptypes), hash_type, list(virtuals)
|
||||
|
||||
|
||||
class SpecfileV2(SpecfileReaderBase):
|
||||
@classmethod
|
||||
def load(cls, data):
|
||||
result = cls._load(data)
|
||||
reconstruct_virtuals_on_edges(result)
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
@@ -4960,7 +5098,7 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
|
||||
raise spack.error.SpecError("Couldn't parse dependency spec.")
|
||||
else:
|
||||
raise spack.error.SpecError("Couldn't parse dependency types in spec.")
|
||||
result.append((dep_name, dep_hash, list(deptypes), hash_type))
|
||||
result.append((dep_name, dep_hash, list(deptypes), hash_type, list(virtuals)))
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
@@ -4980,6 +5118,20 @@ class SpecfileV3(SpecfileV2):
|
||||
pass
|
||||
|
||||
|
||||
class SpecfileV4(SpecfileV2):
|
||||
@classmethod
|
||||
def extract_info_from_dep(cls, elt, hash):
|
||||
dep_hash = elt[hash.name]
|
||||
deptypes = elt["parameters"]["deptypes"]
|
||||
hash_type = hash.name
|
||||
virtuals = elt["parameters"]["virtuals"]
|
||||
return dep_hash, deptypes, hash_type, virtuals
|
||||
|
||||
@classmethod
|
||||
def load(cls, data):
|
||||
return cls._load(data)
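
For orientation, a hedged sketch of the specfile-v4 dependency entry implied by `extract_info_from_dep` above; every value is hypothetical, only the key layout follows the code:

```python
# Hypothetical v4 dependency entry: the hash key (here "hash") plus a
# "parameters" dict carrying deptypes and the virtuals on the edge.
elt = {
    "name": "mpich",
    "hash": "abcdef1234567890abcdef1234567890abcdef12",
    "parameters": {
        "deptypes": ["build", "link"],
        "virtuals": ["mpi"],
    },
}
# extract_info_from_dep(elt, hash) would then return:
#   ("abcdef...", ["build", "link"], hash.name, ["mpi"])
```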

class LazySpecCache(collections.defaultdict):
"""Cache for Specs that uses a spec_like as key, and computes lazily
the corresponding value ``Spec(spec_like)``.
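
The docstring is cut off by the diff view; a hedged sketch of the lazy behavior it describes, assuming the `defaultdict` missing-key hook builds `Spec(spec_like)`:

```python
# Hypothetical sketch: first lookup constructs and caches Spec(spec_like).
cache = LazySpecCache()
s = cache["zlib@1.2.13"]          # built lazily on first access
assert s is cache["zlib@1.2.13"]  # later lookups return the cached Spec
```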

@@ -115,9 +115,6 @@ def default_config(tmpdir, config_directory, monkeypatch, install_mockery_mutabl

spack.config.config, old_config = cfg, spack.config.config
spack.config.config.set("repos", [spack.paths.mock_packages_path])
# This is essential, otherwise the cache will create weird side effects
# that will compromise subsequent tests if compilers.yaml is modified
monkeypatch.setattr(spack.compilers, "_cache_config_file", [])
njobs = spack.config.get("config:build_jobs")
if not njobs:
spack.config.set("config:build_jobs", 4, scope="user")
@@ -204,12 +201,12 @@ def test_default_rpaths_create_install_default_layout(mirror_dir):
install_cmd("--no-cache", sy_spec.name)

# Create a buildcache
buildcache_cmd("push", "-au", "-d", mirror_dir, cspec.name, sy_spec.name)
buildcache_cmd("push", "-au", mirror_dir, cspec.name, sy_spec.name)
# Test force overwrite create buildcache (-f option)
buildcache_cmd("push", "-auf", "-d", mirror_dir, cspec.name)
buildcache_cmd("push", "-auf", mirror_dir, cspec.name)

# Create mirror index
buildcache_cmd("update-index", "-d", mirror_dir)
buildcache_cmd("update-index", mirror_dir)
# List the buildcaches in the mirror
buildcache_cmd("list", "-alv")

@@ -217,13 +214,13 @@ def test_default_rpaths_create_install_default_layout(mirror_dir):
uninstall_cmd("-y", "--dependents", gspec.name)

# Test installing from build caches
buildcache_cmd("install", "-au", cspec.name, sy_spec.name)
buildcache_cmd("install", "-u", cspec.name, sy_spec.name)

# This gives warning that spec is already installed
buildcache_cmd("install", "-au", cspec.name)
buildcache_cmd("install", "-u", cspec.name)

# Test overwrite install
buildcache_cmd("install", "-afu", cspec.name)
buildcache_cmd("install", "-fu", cspec.name)

buildcache_cmd("keys", "-f")
buildcache_cmd("list")
@@ -249,35 +246,10 @@ def test_default_rpaths_install_nondefault_layout(mirror_dir):

# Install some packages with dependent packages
# test install in non-default install path scheme
buildcache_cmd("install", "-au", cspec.name, sy_spec.name)
buildcache_cmd("install", "-u", cspec.name, sy_spec.name)

# Test force install in non-default install path scheme
buildcache_cmd("install", "-auf", cspec.name)


@pytest.mark.requires_executables(*args)
@pytest.mark.maybeslow
@pytest.mark.nomockstage
@pytest.mark.usefixtures("default_config", "cache_directory", "install_dir_default_layout")
def test_relative_rpaths_create_default_layout(mirror_dir):
"""
Test the creation and installation of buildcaches with relative
rpaths into the default directory layout scheme.
"""

gspec, cspec = Spec("garply").concretized(), Spec("corge").concretized()

# Install 'corge' without using a cache
install_cmd("--no-cache", cspec.name)

# Create build cache with relative rpaths
buildcache_cmd("push", "-aur", "-d", mirror_dir, cspec.name)

# Create mirror index
buildcache_cmd("update-index", "-d", mirror_dir)

# Uninstall the package and deps
uninstall_cmd("-y", "--dependents", gspec.name)
buildcache_cmd("install", "-uf", cspec.name)


@pytest.mark.requires_executables(*args)
@@ -294,19 +266,19 @@ def test_relative_rpaths_install_default_layout(mirror_dir):
gspec, cspec = Spec("garply").concretized(), Spec("corge").concretized()

# Install buildcache created with relativized rpaths
buildcache_cmd("install", "-auf", cspec.name)
buildcache_cmd("install", "-uf", cspec.name)

# This gives warning that spec is already installed
buildcache_cmd("install", "-auf", cspec.name)
buildcache_cmd("install", "-uf", cspec.name)

# Uninstall the package and deps
uninstall_cmd("-y", "--dependents", gspec.name)

# Install build cache
buildcache_cmd("install", "-auf", cspec.name)
buildcache_cmd("install", "-uf", cspec.name)

# Test overwrite install
buildcache_cmd("install", "-auf", cspec.name)
buildcache_cmd("install", "-uf", cspec.name)


@pytest.mark.requires_executables(*args)
@@ -323,7 +295,7 @@ def test_relative_rpaths_install_nondefault(mirror_dir):
cspec = Spec("corge").concretized()

# Test install in non-default install path scheme and relative path
buildcache_cmd("install", "-auf", cspec.name)
buildcache_cmd("install", "-uf", cspec.name)


def test_push_and_fetch_keys(mock_gnupghome):
@@ -404,7 +376,7 @@ def test_spec_needs_rebuild(monkeypatch, tmpdir):
install_cmd(s.name)

# Put installed package in the buildcache
buildcache_cmd("push", "-u", "-a", "-d", mirror_dir.strpath, s.name)
buildcache_cmd("push", "-u", "-a", mirror_dir.strpath, s.name)

rebuild = bindist.needs_rebuild(s, mirror_url)

@@ -433,8 +405,8 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
install_cmd("--no-cache", s.name)

# Create a buildcache and update index
buildcache_cmd("push", "-uad", mirror_dir.strpath, s.name)
buildcache_cmd("update-index", "-d", mirror_dir.strpath)
buildcache_cmd("push", "-ua", mirror_dir.strpath, s.name)
buildcache_cmd("update-index", mirror_dir.strpath)

# Check package and dependency in buildcache
cache_list = buildcache_cmd("list", "--allarch")
@@ -446,7 +418,7 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
os.remove(*libelf_files)

# Update index
buildcache_cmd("update-index", "-d", mirror_dir.strpath)
buildcache_cmd("update-index", mirror_dir.strpath)

with spack.config.override("config:binary_index_ttl", 0):
# Check dependency not in buildcache
@@ -522,10 +494,10 @@ def test_update_sbang(tmpdir, test_mirror):
install_cmd("--no-cache", old_spec.name)

# Create a buildcache with the installed spec.
buildcache_cmd("push", "-u", "-a", "-d", mirror_dir, old_spec_hash_str)
buildcache_cmd("push", "-u", "-a", mirror_dir, old_spec_hash_str)

# Need to force an update of the buildcache index
buildcache_cmd("update-index", "-d", mirror_dir)
buildcache_cmd("update-index", mirror_dir)

# Uninstall the original package.
uninstall_cmd("-y", old_spec_hash_str)
@@ -541,7 +513,7 @@ def test_update_sbang(tmpdir, test_mirror):
assert new_spec.dag_hash() == old_spec.dag_hash()

# Install package from buildcache
buildcache_cmd("install", "-a", "-u", "-f", new_spec.name)
buildcache_cmd("install", "-u", "-f", new_spec.name)

# Continue blowing away caches
bindist.clear_spec_cache()

@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import sys

import pytest

import spack.cmd.create
@@ -12,8 +10,6 @@
import spack.util.executable
import spack.util.url as url_util

pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")


@pytest.fixture(
scope="function",

@@ -173,7 +173,7 @@ def wrapper_environment(working_env):
SPACK_DTAGS_TO_ADD="--disable-new-dtags",
SPACK_DTAGS_TO_STRIP="--enable-new-dtags",
SPACK_COMPILER_FLAGS_KEEP="",
SPACK_COMPILER_FLAGS_REPLACE="-Werror*",
SPACK_COMPILER_FLAGS_REPLACE="-Werror*|",
):
yield

@@ -278,8 +278,8 @@ def test_ld_flags(wrapper_environment, wrapper_flags):
ld,
test_args,
["ld"]
+ spack_ldflags
+ test_include_paths
+ [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
+ test_library_paths
+ ["--disable-new-dtags"]
+ test_rpaths
@@ -293,10 +293,10 @@ def test_cpp_flags(wrapper_environment, wrapper_flags):
cpp,
test_args,
["cpp"]
+ spack_cppflags
+ test_include_paths
+ test_library_paths
+ test_args_without_paths,
+ test_args_without_paths
+ spack_cppflags,
)


@@ -306,10 +306,14 @@ def test_cc_flags(wrapper_environment, wrapper_flags):
test_args,
[real_cc]
+ target_args
+ test_include_paths
+ [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
+ test_library_paths
+ ["-Wl,--disable-new-dtags"]
+ test_wl_rpaths
+ test_args_without_paths
+ spack_cppflags
+ spack_cflags
+ spack_ldflags
+ common_compile_args
+ spack_ldlibs,
)

@@ -320,10 +324,13 @@ def test_cxx_flags(wrapper_environment, wrapper_flags):
test_args,
[real_cc]
+ target_args
+ test_include_paths
+ [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
+ test_library_paths
+ ["-Wl,--disable-new-dtags"]
+ test_wl_rpaths
+ test_args_without_paths
+ spack_cppflags
+ spack_cxxflags
+ spack_ldflags
+ common_compile_args
+ spack_ldlibs,
)

@@ -334,10 +341,14 @@ def test_fc_flags(wrapper_environment, wrapper_flags):
test_args,
[real_cc]
+ target_args
+ test_include_paths
+ [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
+ test_library_paths
+ ["-Wl,--disable-new-dtags"]
+ test_wl_rpaths
+ test_args_without_paths
+ spack_fflags
+ spack_cppflags
+ spack_ldflags
+ common_compile_args
+ spack_ldlibs,
)


@@ -46,31 +46,6 @@ def test_import_signing_key(mock_gnupghome):
ci.import_signing_key(signing_key)


def test_configure_compilers(mutable_config):
def assert_missing(config):
assert (
"install_missing_compilers" not in config
or config["install_missing_compilers"] is False
)

def assert_present(config):
assert (
"install_missing_compilers" in config and config["install_missing_compilers"] is True
)

original_config = spack.config.get("config")
assert_missing(original_config)

ci.configure_compilers("FIND_ANY", scope="site")

second_config = spack.config.get("config")
assert_missing(second_config)

ci.configure_compilers("INSTALL_MISSING")
last_config = spack.config.get("config")
assert_present(last_config)


class FakeWebResponder(object):
def __init__(self, response_code=200, content_to_read=[]):
self._resp_code = response_code
@@ -248,7 +223,7 @@ def test_ci_workarounds():
fake_root_spec = "x" * 544
fake_spack_ref = "x" * 40

common_variables = {"SPACK_COMPILER_ACTION": "NONE", "SPACK_IS_PR_PIPELINE": "False"}
common_variables = {"SPACK_IS_PR_PIPELINE": "False"}

common_before_script = [
'git clone "https://github.com/spack/spack"',
@@ -291,7 +266,7 @@ def make_build_job(name, deps, stage, use_artifact_buildcache, optimize, use_dep
def make_rebuild_index_job(use_artifact_buildcache, optimize, use_dependencies):
result = {
"stage": "stage-rebuild-index",
"script": "spack buildcache update-index --mirror-url s3://mirror",
"script": "spack buildcache update-index s3://mirror",
"tags": ["tag-0", "tag-1"],
"image": {"name": "spack/centos7", "entrypoint": [""]},
"after_script": ['rm -rf "./spack"'],
@@ -35,12 +35,15 @@ def test_build_env_requires_a_spec(args):
_out_file = "env.out"


@pytest.mark.parametrize("shell", ["pwsh", "bat"] if sys.platform == "win32" else ["bash"])
@pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_dump(tmpdir):
def test_dump(shell_as, shell, tmpdir):
with tmpdir.as_cwd():
build_env("--dump", _out_file, "zlib")
with open(_out_file) as f:
if sys.platform == "win32":
if shell == "pwsh":
assert any(line.startswith("$Env:PATH") for line in f.readlines())
elif shell == "bat":
assert any(line.startswith('set "PATH=') for line in f.readlines())
else:
assert any(line.startswith("PATH=") for line in f.readlines())

@@ -85,7 +85,7 @@ def tests_buildcache_create(install_mockery, mock_fetch, monkeypatch, tmpdir):
pkg = "trivial-install-test-package"
install(pkg)

buildcache("push", "-d", str(tmpdir), "--unsigned", pkg)
buildcache("push", "--unsigned", str(tmpdir), pkg)

spec = Spec(pkg).concretized()
tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
@@ -105,7 +105,7 @@ def tests_buildcache_create_env(
add(pkg)
install()

buildcache("push", "-d", str(tmpdir), "--unsigned")
buildcache("push", "--unsigned", str(tmpdir))

spec = Spec(pkg).concretized()
tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
@@ -118,7 +118,7 @@ def test_buildcache_create_fails_on_noargs(tmpdir):
"""Ensure that buildcache create fails when given no args or
environment."""
with pytest.raises(spack.main.SpackCommandError):
buildcache("push", "-d", str(tmpdir), "--unsigned")
buildcache("push", "--unsigned", str(tmpdir))


def test_buildcache_create_fail_on_perm_denied(install_mockery, mock_fetch, monkeypatch, tmpdir):
@@ -127,7 +127,7 @@ def test_buildcache_create_fail_on_perm_denied(install_mockery, mock_fetch, monk

tmpdir.chmod(0)
with pytest.raises(OSError) as error:
buildcache("push", "-d", str(tmpdir), "--unsigned", "trivial-install-test-package")
buildcache("push", "--unsigned", str(tmpdir), "trivial-install-test-package")
assert error.value.errno == errno.EACCES
tmpdir.chmod(0o700)

@@ -159,11 +159,11 @@ def test_update_key_index(
# Put installed package in the buildcache, which, because we're signing
# it, should result in the public key getting pushed to the buildcache
# as well.
buildcache("push", "-a", "-d", mirror_dir.strpath, s.name)
buildcache("push", "-a", mirror_dir.strpath, s.name)

# Now make sure that when we pass the "--keys" argument to update-index
# it causes the index to get updated.
buildcache("update-index", "--keys", "-d", mirror_dir.strpath)
buildcache("update-index", "--keys", mirror_dir.strpath)

key_dir_list = os.listdir(os.path.join(mirror_dir.strpath, "build_cache", "_pgp"))

@@ -213,27 +213,25 @@ def verify_mirror_contents():
# Install a package and put it in the buildcache
s = Spec(out_env_pkg).concretized()
install(s.name)
buildcache("push", "-u", "-f", "-a", "--mirror-url", src_mirror_url, s.name)
buildcache("push", "-u", "-f", "-a", src_mirror_url, s.name)

env("create", "test")
with ev.read("test"):
add(in_env_pkg)
install()
buildcache("push", "-u", "-f", "-a", "--mirror-url", src_mirror_url, in_env_pkg)
buildcache("push", "-u", "-f", "-a", src_mirror_url, in_env_pkg)

# Now run the spack buildcache sync command with all the various options
# for specifying mirrors

# Use urls to specify mirrors
buildcache(
"sync", "--src-mirror-url", src_mirror_url, "--dest-mirror-url", dest_mirror_url
)
buildcache("sync", src_mirror_url, dest_mirror_url)

verify_mirror_contents()
shutil.rmtree(dest_mirror_dir)

# Use local directory paths to specify fs locations
buildcache("sync", "--src-directory", src_mirror_dir, "--dest-directory", dest_mirror_dir)
buildcache("sync", src_mirror_dir, dest_mirror_dir)

verify_mirror_contents()
shutil.rmtree(dest_mirror_dir)
@@ -242,7 +240,7 @@ def verify_mirror_contents():
mirror("add", "src", src_mirror_url)
mirror("add", "dest", dest_mirror_url)

buildcache("sync", "--src-mirror-name", "src", "--dest-mirror-name", "dest")
buildcache("sync", "src", "dest")

verify_mirror_contents()
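
`spack buildcache sync` likewise drops its `--src-*`/`--dest-*` flags for two positional mirrors. A hedged sketch of the three equivalent call styles this test walks through (values hypothetical):

```python
# Source and destination may each be a URL, a local directory, or a mirror name.
buildcache("sync", "file:///tmp/src_mirror", "file:///tmp/dest_mirror")  # URLs
buildcache("sync", "/tmp/src_mirror", "/tmp/dest_mirror")                # directories
buildcache("sync", "src", "dest")                                        # mirror names
```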

@@ -260,7 +258,7 @@ def test_buildcache_create_install(
pkg = "trivial-install-test-package"
install(pkg)

buildcache("push", "-d", str(tmpdir), "--unsigned", pkg)
buildcache("push", "--unsigned", str(tmpdir), pkg)

spec = Spec(pkg).concretized()
tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
@@ -324,12 +322,12 @@ def fake_push(node, push_url, options):

monkeypatch.setattr(spack.binary_distribution, "push_or_raise", fake_push)

buildcache_create_args = ["create", "-d", str(tmpdir), "--unsigned"]
buildcache_create_args = ["create", "--unsigned"]

if things_to_install != "":
buildcache_create_args.extend(["--only", things_to_install])

buildcache_create_args.extend([slash_hash])
buildcache_create_args.extend([str(tmpdir), slash_hash])

buildcache(*buildcache_create_args)


@@ -17,7 +17,6 @@
import spack
import spack.binary_distribution
import spack.ci as ci
import spack.compilers as compilers
import spack.config
import spack.environment as ev
import spack.hash_types as ht
@@ -30,7 +29,7 @@
from spack.schema.buildcache_spec import schema as specfile_schema
from spack.schema.ci import schema as ci_schema
from spack.schema.database_index import schema as db_idx_schema
from spack.spec import CompilerSpec, Spec
from spack.spec import Spec
from spack.util.pattern import Bunch

config_cmd = spack.main.SpackCommand("config")
@@ -163,8 +162,6 @@ def test_ci_generate_with_env(
"""\
spack:
definitions:
- bootstrap:
- cmake@3.4.3
- old-gcc-pkgs:
- archive-files
- callpath
@@ -179,9 +176,6 @@ def test_ci_generate_with_env(
mirrors:
some-mirror: {0}
ci:
bootstrap:
- name: bootstrap
compiler-agnostic: true
pipeline-gen:
- submapping:
- match:
@@ -221,16 +215,10 @@ def test_ci_generate_with_env(
with open(outputfile) as f:
contents = f.read()
yaml_contents = syaml.load(contents)
found_spec = False
for ci_key in yaml_contents.keys():
if "(bootstrap)" in ci_key:
found_spec = True
assert "cmake" in ci_key
assert found_spec
assert "stages" in yaml_contents
assert len(yaml_contents["stages"]) == 6
assert len(yaml_contents["stages"]) == 5
assert yaml_contents["stages"][0] == "stage-0"
assert yaml_contents["stages"][5] == "stage-rebuild-index"
assert yaml_contents["stages"][4] == "stage-rebuild-index"

assert "rebuild-index" in yaml_contents
rebuild_job = yaml_contents["rebuild-index"]
@@ -244,155 +232,6 @@ def test_ci_generate_with_env(
assert artifacts_root == "jobs_scratch_dir"


def _validate_needs_graph(yaml_contents, needs_graph, artifacts):
"""Validate the needs graph in the generated CI"""

# TODO: Fix the logic to catch errors where expected packages/needs are not
# found.
for job_name, job_def in yaml_contents.items():
for needs_def_name, needs_list in needs_graph.items():
if job_name.startswith(needs_def_name):
# check job needs against the expected needs definition
j_needs = job_def["needs"]
assert all(
[
job_needs["job"][: job_needs["job"].index("/")] in needs_list
for job_needs in j_needs
]
)
assert all(
[nl in [n["job"][: n["job"].index("/")] for n in j_needs] for nl in needs_list]
)
assert all([job_needs["artifacts"] == artifacts for job_needs in j_needs])
break
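
A hedged sketch of the inputs `_validate_needs_graph` accepts: generated jobs whose `needs` entries use `name/hash` job ids (hash suffixes hypothetical):

```python
# Minimal shapes accepted by _validate_needs_graph (all values hypothetical).
yaml_contents = {
    "(specs) libdwarf/abc1234": {
        "needs": [{"job": "(specs) libelf/def5678", "artifacts": False}],
    },
}
needs_graph = {"(specs) libdwarf": ["(specs) libelf"]}
_validate_needs_graph(yaml_contents, needs_graph, False)
```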


def test_ci_generate_bootstrap_gcc(
tmpdir, working_env, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment
):
"""Test that we can bootstrap a compiler and use it as the
compiler for a spec in the environment"""
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
f.write(
"""\
spack:
definitions:
- bootstrap:
- gcc@3.0
specs:
- dyninst%gcc@=3.0
mirrors:
some-mirror: https://my.fake.mirror
ci:
bootstrap:
- name: bootstrap
compiler-agnostic: true
pipeline-gen:
- submapping:
- match:
- arch=test-debian6-x86_64
build-job:
tags:
- donotcare
- match:
- arch=test-debian6-aarch64
build-job:
tags:
- donotcare
- any-job:
tags:
- donotcare
"""
)

needs_graph = {
"(bootstrap) conflict": [],
"(bootstrap) gcc": ["(bootstrap) conflict"],
"(specs) libelf": ["(bootstrap) gcc"],
"(specs) libdwarf": ["(bootstrap) gcc", "(specs) libelf"],
"(specs) dyninst": ["(bootstrap) gcc", "(specs) libelf", "(specs) libdwarf"],
}

with tmpdir.as_cwd():
env_cmd("create", "test", "./spack.yaml")
outputfile = str(tmpdir.join(".gitlab-ci.yml"))

with ev.read("test"):
ci_cmd("generate", "--output-file", outputfile)

with open(outputfile) as f:
contents = f.read()
yaml_contents = syaml.load(contents)
_validate_needs_graph(yaml_contents, needs_graph, False)


def test_ci_generate_bootstrap_artifacts_buildcache(
tmpdir, working_env, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment
):
"""Test that we can bootstrap a compiler when artifacts buildcache
is turned on"""
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
f.write(
"""\
spack:
definitions:
- bootstrap:
- gcc@3.0
specs:
- dyninst%gcc@=3.0
mirrors:
some-mirror: https://my.fake.mirror
ci:
bootstrap:
- name: bootstrap
compiler-agnostic: true
pipeline-gen:
- submapping:
- match:
- arch=test-debian6-x86_64
build-job:
tags:
- donotcare
- match:
- arch=test-debian6-aarch64
build-job:
tags:
- donotcare
- any-job:
tags:
- donotcare
enable-artifacts-buildcache: True
"""
)

needs_graph = {
"(bootstrap) conflict": [],
"(bootstrap) gcc": ["(bootstrap) conflict"],
"(specs) libelf": ["(bootstrap) gcc", "(bootstrap) conflict"],
"(specs) libdwarf": ["(bootstrap) gcc", "(bootstrap) conflict", "(specs) libelf"],
"(specs) dyninst": [
"(bootstrap) gcc",
"(bootstrap) conflict",
"(specs) libelf",
"(specs) libdwarf",
],
}

with tmpdir.as_cwd():
env_cmd("create", "test", "./spack.yaml")
outputfile = str(tmpdir.join(".gitlab-ci.yml"))

with ev.read("test"):
ci_cmd("generate", "--output-file", outputfile)

with open(outputfile) as f:
contents = f.read()
yaml_contents = syaml.load(contents)
_validate_needs_graph(yaml_contents, needs_graph, True)


def test_ci_generate_with_env_missing_section(
tmpdir,
working_env,
@@ -889,7 +728,7 @@ def activate_rebuild_env(tmpdir, pkg_name, rebuild_env):
"SPACK_JOB_SPEC_DAG_HASH": rebuild_env.root_spec_dag_hash,
"SPACK_JOB_SPEC_PKG_NAME": pkg_name,
"SPACK_COMPILER_ACTION": "NONE",
"SPACK_CDASH_BUILD_NAME": "(specs) {0}".format(pkg_name),
"SPACK_CDASH_BUILD_NAME": pkg_name,
"SPACK_REMOTE_MIRROR_URL": rebuild_env.mirror_url,
"SPACK_PIPELINE_TYPE": "spack_protected_branch",
"CI_JOB_URL": rebuild_env.ci_job_url,
@@ -1055,7 +894,7 @@ def test_ci_nothing_to_rebuild(
)

install_cmd("archive-files")
buildcache_cmd("push", "-a", "-f", "-u", "--mirror-url", mirror_url, "archive-files")
buildcache_cmd("push", "-a", "-f", "-u", mirror_url, "archive-files")

filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
@@ -1155,8 +994,8 @@ def test_ci_generate_mirror_override(
second_ci_yaml = str(tmpdir.join(".gitlab-ci-2.yml"))
with ev.read("test"):
install_cmd()
buildcache_cmd("push", "-u", "--mirror-url", mirror_url, "patchelf")
buildcache_cmd("update-index", "--mirror-url", mirror_url, output=str)
buildcache_cmd("push", "-u", mirror_url, "patchelf")
buildcache_cmd("update-index", mirror_url, output=str)

# This generate should not trigger a rebuild of patchelf, since it's in
# the main mirror referenced in the environment.
@@ -1283,7 +1122,7 @@ def test_push_mirror_contents(
found_spec_job = False

for ci_key in yaml_contents.keys():
if "(specs) patchelf" in ci_key:
if "patchelf" in ci_key:
the_elt = yaml_contents[ci_key]
assert "variables" in the_elt
job_vars = the_elt["variables"]
@@ -1297,7 +1136,7 @@ def test_push_mirror_contents(
mirror_cmd("rm", "test-ci")

# Test generating buildcache index while we have bin mirror
buildcache_cmd("update-index", "--mirror-url", mirror_url)
buildcache_cmd("update-index", mirror_url)
index_path = os.path.join(buildcache_path, "index.json")
with open(index_path) as idx_fd:
index_object = json.load(idx_fd)
@@ -1457,7 +1296,7 @@ def test_ci_generate_override_runner_attrs(
assert global_vars["SPACK_CHECKOUT_VERSION"] == "12ad69eb1"

for ci_key in yaml_contents.keys():
if "(specs) a" in ci_key:
if ci_key.startswith("a"):
# Make sure a's attributes override variables, and all the
# scripts. Also, make sure the 'toplevel' tag doesn't
# appear twice, but that a's specific extra tag does appear
@@ -1477,7 +1316,7 @@ def test_ci_generate_override_runner_attrs(
assert the_elt["script"][0] == "custom main step"
assert len(the_elt["after_script"]) == 1
assert the_elt["after_script"][0] == "custom post step one"
if "(specs) dependency-install" in ci_key:
if "dependency-install" in ci_key:
# Since the dependency-install match omits any
# runner-attributes, make sure it inherited all the
# top-level attributes.
@@ -1495,7 +1334,7 @@ def test_ci_generate_override_runner_attrs(
assert the_elt["script"][0] == "main step"
assert len(the_elt["after_script"]) == 1
assert the_elt["after_script"][0] == "post step one"
if "(specs) flatten-deps" in ci_key:
if "flatten-deps" in ci_key:
# The flatten-deps match specifies that we keep the two
# top level variables, but add a third specific one. It
# also adds a custom tag which should be combined with
@@ -1554,9 +1393,10 @@ def test_ci_generate_with_workarounds(
yaml_contents = syaml.load(contents)

found_one = False
non_rebuild_keys = ["workflow", "stages", "variables", "rebuild-index"]

for ci_key in yaml_contents.keys():
if ci_key.startswith("(specs) "):
if ci_key not in non_rebuild_keys:
found_one = True
job_obj = yaml_contents[ci_key]
assert "needs" not in job_obj
@@ -1613,7 +1453,7 @@ def test_ci_rebuild_index(
ypfd.write(spec_json)

install_cmd("--add", "--keep-stage", "-f", json_path)
buildcache_cmd("push", "-u", "-a", "-f", "--mirror-url", mirror_url, "callpath")
buildcache_cmd("push", "-u", "-a", "-f", mirror_url, "callpath")
ci_cmd("rebuild-index")

buildcache_path = os.path.join(mirror_dir.strpath, "build_cache")
@@ -1623,140 +1463,6 @@ def test_ci_rebuild_index(
jsonschema.validate(index_object, db_idx_schema)


def test_ci_generate_bootstrap_prune_dag(
install_mockery_mutable_config,
mock_packages,
mock_fetch,
mock_archive,
mutable_config,
monkeypatch,
tmpdir,
mutable_mock_env_path,
ci_base_environment,
):
"""Test compiler bootstrapping with DAG pruning. Specifically, make
sure that if we detect the bootstrapped compiler needs to be rebuilt,
we ensure the spec we want to build with that compiler is scheduled
for rebuild as well."""

# Create a temp mirror directory for buildcache usage
mirror_dir = tmpdir.join("mirror_dir")
mirror_url = "file://{0}".format(mirror_dir.strpath)

# Install a compiler, because we want to put it in a buildcache
install_cmd("gcc@=12.2.0%gcc@10.2.1")

# Put installed compiler in the buildcache
buildcache_cmd("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, "gcc@12.2.0%gcc@10.2.1")

# Now uninstall the compiler
uninstall_cmd("-y", "gcc@12.2.0%gcc@10.2.1")

monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False)
spack.config.set("config:install_missing_compilers", True)
assert CompilerSpec("gcc@=12.2.0") not in compilers.all_compiler_specs()

# Configure the mirror where we put that buildcache w/ the compiler
mirror_cmd("add", "test-mirror", mirror_url)

install_cmd("--no-check-signature", "b%gcc@=12.2.0")

# Put spec built with installed compiler in the buildcache
buildcache_cmd("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, "b%gcc@12.2.0")

# Now uninstall the spec
uninstall_cmd("-y", "b%gcc@12.2.0")

filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
f.write(
"""\
spack:
definitions:
- bootstrap:
- gcc@=12.2.0%gcc@10.2.1
specs:
- b%gcc@12.2.0
mirrors:
atestm: {0}
ci:
bootstrap:
- name: bootstrap
compiler-agnostic: true
pipeline-gen:
- submapping:
- match:
- arch=test-debian6-x86_64
build-job:
tags:
- donotcare
- match:
- arch=test-debian6-core2
build-job:
tags:
- meh
- match:
- arch=test-debian6-aarch64
build-job:
tags:
- donotcare
- match:
- arch=test-debian6-m1
build-job:
tags:
- meh
""".format(
mirror_url
)
)

# Without this monkeypatch, pipeline generation process would think that
# nothing in the environment needs rebuilding. With the monkeypatch, the
# process sees the compiler as needing a rebuild, which should then result
# in the specs built with that compiler needing a rebuild too.
def fake_get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
if spec.name == "gcc":
return []
else:
return [{"spec": spec, "mirror_url": mirror_url}]

with tmpdir.as_cwd():
env_cmd("create", "test", "./spack.yaml")
outputfile = str(tmpdir.join(".gitlab-ci.yml"))

with ev.read("test"):
ci_cmd("generate", "--output-file", outputfile)

with open(outputfile) as of:
yaml_contents = of.read()
original_yaml_contents = syaml.load(yaml_contents)

# without the monkeypatch, everything appears up to date and no
# rebuild jobs are generated.
assert original_yaml_contents
assert "no-specs-to-rebuild" in original_yaml_contents

monkeypatch.setattr(
spack.binary_distribution, "get_mirrors_for_spec", fake_get_mirrors_for_spec
)

ci_cmd("generate", "--output-file", outputfile)

with open(outputfile) as of:
yaml_contents = of.read()
new_yaml_contents = syaml.load(yaml_contents)

assert new_yaml_contents

# This 'needs' graph reflects that even though specs 'a' and 'b' do
# not otherwise need to be rebuilt (thanks to DAG pruning), they
# both end up in the generated pipeline because the compiler they
# depend on is bootstrapped, and *does* need to be rebuilt.
needs_graph = {"(bootstrap) gcc": [], "(specs) b": ["(bootstrap) gcc"]}

_validate_needs_graph(new_yaml_contents, needs_graph, False)


def test_ci_get_stack_changed(mock_git_repo, monkeypatch):
"""Test that we can detect the change to .gitlab-ci.yml in a
mock spack git repo."""
@@ -1828,7 +1534,7 @@ def fake_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
generated_hashes = []

for ci_key in yaml_contents.keys():
if ci_key.startswith("(specs)"):
if "variables" in yaml_contents[ci_key]:
generated_hashes.append(
yaml_contents[ci_key]["variables"]["SPACK_JOB_SPEC_DAG_HASH"]
)
@@ -2240,9 +1946,7 @@ def test_ci_reproduce(
ci_cmd("generate", "--output-file", pipeline_path, "--artifacts-root", artifacts_root)

target_name = spack.platforms.test.Test.default
job_name = ci.get_job_name(
"specs", False, job_spec, "test-debian6-%s" % target_name, None
)
job_name = ci.get_job_name(job_spec, "test-debian6-%s" % target_name, None)

repro_file = os.path.join(working_dir.strpath, "repro.json")
repro_details = {
@@ -2309,8 +2013,6 @@ def test_cmd_first_line():
legacy_spack_yaml_contents = """
spack:
definitions:
- bootstrap:
- cmake@3.4.3
- old-gcc-pkgs:
- archive-files
- callpath
@@ -2325,9 +2027,6 @@ def test_cmd_first_line():
mirrors:
test-mirror: file:///some/fake/mirror
{0}:
bootstrap:
- name: bootstrap
compiler-agnostic: true
match_behavior: first
mappings:
- match:
@@ -2379,16 +2078,10 @@ def test_gitlab_ci_deprecated(
contents = f.read()
yaml_contents = syaml.load(contents)

found_spec = False
for ci_key in yaml_contents.keys():
if "(bootstrap)" in ci_key:
found_spec = True
assert "cmake" in ci_key
assert found_spec
assert "stages" in yaml_contents
assert len(yaml_contents["stages"]) == 6
assert len(yaml_contents["stages"]) == 5
assert yaml_contents["stages"][0] == "stage-0"
assert yaml_contents["stages"][5] == "stage-rebuild-index"
assert yaml_contents["stages"][4] == "stage-rebuild-index"

assert "rebuild-index" in yaml_contents
rebuild_job = yaml_contents["rebuild-index"]

@@ -8,8 +8,6 @@

import pytest

import llnl.util.filesystem

import spack.compilers
import spack.main
import spack.version
@@ -18,124 +16,8 @@


@pytest.fixture
def mock_compiler_version():
return "4.5.3"


@pytest.fixture()
def mock_compiler_dir(tmpdir, mock_compiler_version):
"""Return a directory containing a fake, but detectable compiler."""

tmpdir.ensure("bin", dir=True)
bin_dir = tmpdir.join("bin")

gcc_path = bin_dir.join("gcc")
gxx_path = bin_dir.join("g++")
gfortran_path = bin_dir.join("gfortran")

gcc_path.write(
"""\
#!/bin/sh

for arg in "$@"; do
if [ "$arg" = -dumpversion ]; then
echo '%s'
fi
done
"""
% mock_compiler_version
)

# Create some mock compilers in the temporary directory
llnl.util.filesystem.set_executable(str(gcc_path))
gcc_path.copy(gxx_path, mode=True)
gcc_path.copy(gfortran_path, mode=True)

return str(tmpdir)


@pytest.mark.skipif(
sys.platform == "win32",
reason="Cannot execute bash \
script on Windows",
)
@pytest.mark.regression("11678,13138")
def test_compiler_find_without_paths(no_compilers_yaml, working_env, tmpdir):
with tmpdir.as_cwd():
with open("gcc", "w") as f:
f.write(
"""\
#!/bin/sh
echo "0.0.0"
"""
)
os.chmod("gcc", 0o700)

os.environ["PATH"] = str(tmpdir)
output = compiler("find", "--scope=site")

assert "gcc" in output


@pytest.mark.regression("17589")
def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, tmpdir):
with tmpdir.as_cwd():
# make a script to emulate apple gcc's version args
with open("gcc", "w") as f:
f.write(
"""\
#!/bin/sh
if [ "$1" = "-dumpversion" ]; then
echo "4.2.1"
elif [ "$1" = "--version" ]; then
echo "Configured with: --prefix=/dummy"
echo "Apple clang version 11.0.0 (clang-1100.0.33.16)"
echo "Target: x86_64-apple-darwin18.7.0"
echo "Thread model: posix"
echo "InstalledDir: /dummy"
else
echo "clang: error: no input files"
fi
"""
)
os.chmod("gcc", 0o700)

os.environ["PATH"] = str(tmpdir)
output = compiler("find", "--scope=site")

assert "gcc" not in output


def test_compiler_remove(mutable_config, mock_packages):
assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
spack.cmd.compiler.compiler_remove(args)
assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()


@pytest.mark.skipif(
sys.platform == "win32",
reason="Cannot execute bash \
script on Windows",
)
def test_compiler_add(mutable_config, mock_packages, mock_compiler_dir, mock_compiler_version):
# Compilers available by default.
old_compilers = set(spack.compilers.all_compiler_specs())

args = spack.util.pattern.Bunch(
all=None, compiler_spec=None, add_paths=[mock_compiler_dir], scope=None
)
spack.cmd.compiler.compiler_find(args)

# Ensure new compiler is in there
new_compilers = set(spack.compilers.all_compiler_specs())
new_compiler = new_compilers - old_compilers
assert any(c.version == spack.version.Version(mock_compiler_version) for c in new_compiler)


@pytest.fixture
def clangdir(tmpdir):
"""Create a directory with some dummy compiler scripts in it.
def compilers_dir(mock_executable):
"""Create a directory with some mock compiler scripts in it.

Scripts are:
- clang
@@ -145,11 +27,9 @@ def clangdir(tmpdir):
- gfortran-8

"""
with tmpdir.as_cwd():
with open("clang", "w") as f:
f.write(
"""\
#!/bin/sh
clang_path = mock_executable(
"clang",
output="""
if [ "$1" = "--version" ]; then
echo "clang version 11.0.0 (clang-1100.0.33.16)"
echo "Target: x86_64-apple-darwin18.7.0"
@@ -159,12 +39,11 @@ def clangdir(tmpdir):
echo "clang: error: no input files"
exit 1
fi
"""
)
shutil.copy("clang", "clang++")
""",
)
shutil.copy(clang_path, clang_path.parent / "clang++")

gcc_script = """\
#!/bin/sh
gcc_script = """
if [ "$1" = "-dumpversion" ]; then
echo "8"
elif [ "$1" = "-dumpfullversion" ]; then
@@ -178,30 +57,111 @@ def clangdir(tmpdir):
exit 1
fi
"""
with open("gcc-8", "w") as f:
f.write(gcc_script.format("gcc", "gcc-8"))
with open("g++-8", "w") as f:
f.write(gcc_script.format("g++", "g++-8"))
with open("gfortran-8", "w") as f:
f.write(gcc_script.format("GNU Fortran", "gfortran-8"))
os.chmod("clang", 0o700)
os.chmod("clang++", 0o700)
os.chmod("gcc-8", 0o700)
os.chmod("g++-8", 0o700)
os.chmod("gfortran-8", 0o700)
mock_executable("gcc-8", output=gcc_script.format("gcc", "gcc-8"))
mock_executable("g++-8", output=gcc_script.format("g++", "g++-8"))
mock_executable("gfortran-8", output=gcc_script.format("GNU Fortran", "gfortran-8"))

yield tmpdir
return clang_path.parent
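
These tests now lean on a shared `mock_executable` fixture instead of hand-written scripts and `os.chmod` calls. A hedged sketch of what such a helper plausibly does; the real fixture lives elsewhere in the suite and may differ:

```python
# Hypothetical mock_executable-style helper: writes a small POSIX shell script
# into a fresh bin/ directory, marks it executable, and returns its path, so a
# test can put path.parent on PATH.
import pathlib
import tempfile


def mock_executable(name: str, output: str) -> pathlib.Path:
    bin_dir = pathlib.Path(tempfile.mkdtemp()) / "bin"
    bin_dir.mkdir(parents=True, exist_ok=True)
    path = bin_dir / name
    path.write_text("#!/bin/sh\n" + output + "\n")
    path.chmod(0o755)
    return path
```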


@pytest.mark.skipif(
sys.platform == "win32",
reason="Cannot execute bash \
script on Windows",
)
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
@pytest.mark.regression("11678,13138")
def test_compiler_find_without_paths(no_compilers_yaml, working_env, mock_executable):
"""Tests that 'spack compiler find' looks into PATH by default, if no specific path
is given.
"""
gcc_path = mock_executable("gcc", output='echo "0.0.0"')

os.environ["PATH"] = str(gcc_path.parent)
output = compiler("find", "--scope=site")

assert "gcc" in output


@pytest.mark.regression("17589")
def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, mock_executable):
"""Tests that Spack won't mistake Apple's GCC as a "real" GCC, since it's really
Clang with a few tweaks.
"""
gcc_path = mock_executable(
"gcc",
output="""
if [ "$1" = "-dumpversion" ]; then
echo "4.2.1"
elif [ "$1" = "--version" ]; then
echo "Configured with: --prefix=/dummy"
echo "Apple clang version 11.0.0 (clang-1100.0.33.16)"
echo "Target: x86_64-apple-darwin18.7.0"
echo "Thread model: posix"
echo "InstalledDir: /dummy"
else
echo "clang: error: no input files"
fi
""",
)

os.environ["PATH"] = str(gcc_path.parent)
output = compiler("find", "--scope=site")

assert "gcc" not in output


@pytest.mark.regression("37996")
def test_compiler_remove(mutable_config, mock_packages):
"""Tests that we can remove a compiler from configuration."""
assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
spack.cmd.compiler.compiler_remove(args)
assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()


@pytest.mark.regression("37996")
def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages):
# Duplicate "site" scope into "user" scope
site_config = spack.config.get("compilers", scope="site")
spack.config.set("compilers", site_config, scope="user")

assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
spack.cmd.compiler.compiler_remove(args)
assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()


@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
def test_compiler_add(mutable_config, mock_packages, mock_executable):
"""Tests that we can add a compiler to configuration."""
expected_version = "4.5.3"
gcc_path = mock_executable(
"gcc",
output=f"""\
for arg in "$@"; do
if [ "$arg" = -dumpversion ]; then
echo '{expected_version}'
fi
done
""",
)
bin_dir = gcc_path.parent
root_dir = bin_dir.parent

compilers_before_find = set(spack.compilers.all_compiler_specs())
args = spack.util.pattern.Bunch(
all=None, compiler_spec=None, add_paths=[str(root_dir)], scope=None
)
spack.cmd.compiler.compiler_find(args)
compilers_after_find = set(spack.compilers.all_compiler_specs())

compilers_added_by_find = compilers_after_find - compilers_before_find
assert len(compilers_added_by_find) == 1
new_compiler = compilers_added_by_find.pop()
assert new_compiler.version == spack.version.Version(expected_version)


@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
@pytest.mark.regression("17590")
def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, clangdir):
def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_dir):
"""Ensure that we'll mix compilers with different suffixes when necessary."""
os.environ["PATH"] = str(clangdir)
os.environ["PATH"] = str(compilers_dir)
output = compiler("find", "--scope=site")

assert "clang@11.0.0" in output
@@ -211,39 +171,33 @@ def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, clangdir):
clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")
gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")

gfortran_path = str(clangdir.join("gfortran-8"))
gfortran_path = str(compilers_dir / "gfortran-8")

assert clang["paths"] == {
"cc": str(clangdir.join("clang")),
"cxx": str(clangdir.join("clang++")),
"cc": str(compilers_dir / "clang"),
"cxx": str(compilers_dir / "clang++"),
# we only auto-detect mixed clang on macos
"f77": gfortran_path if sys.platform == "darwin" else None,
"fc": gfortran_path if sys.platform == "darwin" else None,
}

assert gcc["paths"] == {
"cc": str(clangdir.join("gcc-8")),
"cxx": str(clangdir.join("g++-8")),
"cc": str(compilers_dir / "gcc-8"),
"cxx": str(compilers_dir / "g++-8"),
"f77": gfortran_path,
"fc": gfortran_path,
}


@pytest.mark.skipif(
sys.platform == "win32",
reason="Cannot execute bash \
script on Windows",
)
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
@pytest.mark.regression("17590")
def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, clangdir):
def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compilers_dir):
"""Ensure that we'll pick 'clang' over 'clang-gpu' when there is a choice."""
with clangdir.as_cwd():
shutil.copy("clang", "clang-gpu")
shutil.copy("clang++", "clang++-gpu")
os.chmod("clang-gpu", 0o700)
os.chmod("clang++-gpu", 0o700)
clang_path = compilers_dir / "clang"
shutil.copy(clang_path, clang_path.parent / "clang-gpu")
shutil.copy(clang_path, clang_path.parent / "clang++-gpu")

os.environ["PATH"] = str(clangdir)
os.environ["PATH"] = str(compilers_dir)
output = compiler("find", "--scope=site")

assert "clang@11.0.0" in output
@@ -252,46 +206,38 @@ def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, clangdir
config = spack.compilers.get_compiler_config("site", False)
clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")

assert clang["paths"]["cc"] == str(clangdir.join("clang"))
assert clang["paths"]["cxx"] == str(clangdir.join("clang++"))
assert clang["paths"]["cc"] == str(compilers_dir / "clang")
assert clang["paths"]["cxx"] == str(compilers_dir / "clang++")


@pytest.mark.skipif(
sys.platform == "win32",
reason="Cannot execute bash \
script on Windows",
)
def test_compiler_find_path_order(no_compilers_yaml, working_env, clangdir):
"""Ensure that we find compilers that come first in the PATH first"""

with clangdir.as_cwd():
os.mkdir("first_in_path")
shutil.copy("gcc-8", "first_in_path/gcc-8")
shutil.copy("g++-8", "first_in_path/g++-8")
shutil.copy("gfortran-8", "first_in_path/gfortran-8")

# the first_in_path folder should be searched first
os.environ["PATH"] = "{0}:{1}".format(str(clangdir.join("first_in_path")), str(clangdir))
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
def test_compiler_find_path_order(no_compilers_yaml, working_env, compilers_dir):
"""Ensure that we look for compilers in the same order as PATH, when there are duplicates"""
new_dir = compilers_dir / "first_in_path"
new_dir.mkdir()
for name in ("gcc-8", "g++-8", "gfortran-8"):
shutil.copy(compilers_dir / name, new_dir / name)
# Set PATH to have the new folder searched first
os.environ["PATH"] = "{}:{}".format(str(new_dir), str(compilers_dir))

compiler("find", "--scope=site")

config = spack.compilers.get_compiler_config("site", False)

gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")

assert gcc["paths"] == {
"cc": str(clangdir.join("first_in_path", "gcc-8")),
"cxx": str(clangdir.join("first_in_path", "g++-8")),
"f77": str(clangdir.join("first_in_path", "gfortran-8")),
"fc": str(clangdir.join("first_in_path", "gfortran-8")),
"cc": str(new_dir / "gcc-8"),
"cxx": str(new_dir / "g++-8"),
"f77": str(new_dir / "gfortran-8"),
"fc": str(new_dir / "gfortran-8"),
}


def test_compiler_list_empty(no_compilers_yaml, working_env, clangdir):
# Spack should not automatically search for compilers when listing them and none
# are available. And when stdout is not a tty like in tests, there should be no
# output and no error exit code.
os.environ["PATH"] = str(clangdir)
def test_compiler_list_empty(no_compilers_yaml, working_env, compilers_dir):
"""Spack should not automatically search for compilers when listing them and none are
available. And when stdout is not a tty like in tests, there should be no output and
no error exit code.
"""
os.environ["PATH"] = str(compilers_dir)
out = compiler("list")
assert not out
assert compiler.returncode == 0
|
@@ -32,7 +32,7 @@ def check_develop(self, env, spec, path=None):
|
||||
assert dev_specs_entry["spec"] == str(spec)
|
||||
|
||||
# check yaml representation
|
||||
yaml = ev.config_dict(env.manifest)
|
||||
yaml = env.manifest[ev.TOP_LEVEL_KEY]
|
||||
assert spec.name in yaml["develop"]
|
||||
yaml_entry = yaml["develop"][spec.name]
|
||||
assert yaml_entry["spec"] == str(spec)
|
||||
|
@@ -44,9 +44,8 @@ def define_plat_exe(exe):
|
||||
|
||||
def test_find_external_single_package(mock_executable, executables_found, _platform_executables):
|
||||
pkgs_to_check = [spack.repo.path.get_pkg_class("cmake")]
|
||||
executables_found(
|
||||
{mock_executable("cmake", output="echo cmake version 1.foo"): define_plat_exe("cmake")}
|
||||
)
|
||||
cmake_path = mock_executable("cmake", output="echo cmake version 1.foo")
|
||||
executables_found({str(cmake_path): define_plat_exe("cmake")})
|
||||
|
||||
pkg_to_entries = spack.detection.by_executable(pkgs_to_check)
|
||||
|
||||
@@ -71,7 +70,7 @@ def test_find_external_two_instances_same_package(
|
||||
"cmake", output="echo cmake version 3.17.2", subdir=("base2", "bin")
|
||||
)
|
||||
cmake_exe = define_plat_exe("cmake")
|
||||
executables_found({cmake_path1: cmake_exe, cmake_path2: cmake_exe})
|
||||
executables_found({str(cmake_path1): cmake_exe, str(cmake_path2): cmake_exe})
|
||||
|
||||
pkg_to_entries = spack.detection.by_executable(pkgs_to_check)
|
||||
|
||||
@@ -107,7 +106,7 @@ def test_get_executables(working_env, mock_executable):
|
||||
cmake_path1 = mock_executable("cmake", output="echo cmake version 1.foo")
|
||||
path_to_exe = spack.detection.executables_in_path([os.path.dirname(cmake_path1)])
|
||||
cmake_exe = define_plat_exe("cmake")
|
||||
assert path_to_exe[cmake_path1] == cmake_exe
|
||||
assert path_to_exe[str(cmake_path1)] == cmake_exe
|
||||
|
||||
|
||||
external = SpackCommand("external")
|
||||
@@ -334,7 +333,7 @@ def test_packages_yaml_format(mock_executable, mutable_config, monkeypatch, _pla
|
||||
assert "extra_attributes" in external_gcc
|
||||
extra_attributes = external_gcc["extra_attributes"]
|
||||
assert "prefix" not in extra_attributes
|
||||
assert extra_attributes["compilers"]["c"] == gcc_exe
|
||||
assert extra_attributes["compilers"]["c"] == str(gcc_exe)
|
||||
|
||||
|
||||
def test_overriding_prefix(mock_executable, mutable_config, monkeypatch, _platform_executables):
|
||||
|
@@ -3,16 +3,12 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
from spack.main import SpackCommand, SpackCommandError
|
||||
|
||||
graph = SpackCommand("graph")
|
||||
|
||||
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
|
||||
|
||||
|
||||
@pytest.mark.db
|
||||
@pytest.mark.usefixtures("mock_packages", "database")
|
||||
|
@@ -4,7 +4,6 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -13,8 +12,6 @@
|
||||
|
||||
info = SpackCommand("info")
|
||||
|
||||
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Not yet implemented on Windows")
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def parser():
|
||||
|
@@ -966,7 +966,7 @@ def test_compiler_bootstrap_from_binary_mirror(
|
||||
install("gcc@=10.2.0")
|
||||
|
||||
# Put installed compiler in the buildcache
|
||||
buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, "gcc@10.2.0")
|
||||
buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, "gcc@10.2.0")
|
||||
|
||||
# Now uninstall the compiler
|
||||
uninstall("-y", "gcc@10.2.0")
|
||||
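For illustration, the new call shape for pushing to a buildcache: the deprecated "-d/--directory" flag is dropped and the mirror is a positional argument. A minimal sketch using the test suite's own SpackCommand wrapper (the package name and mirror path below are assumptions, not from this diff):

    from spack.main import SpackCommand

    install = SpackCommand("install")
    buildcache = SpackCommand("buildcache")

    install("libdwarf")  # put something in the local store first
    # unsigned (-u), include dependencies (-a), overwrite (-f); mirror path is positional
    buildcache("push", "-u", "-a", "-f", "/tmp/test-mirror", "libdwarf")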
@@ -1138,7 +1138,7 @@ def install_use_buildcache(opt):

    # Populate the buildcache
    install(package_name)
-   buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, package_name, dependency_name)
+   buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, package_name, dependency_name)

    # Uninstall all of the packages for a clean slate
    uninstall("-y", "-a")

@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import sys

import pytest

import spack.store

@@ -15,8 +13,6 @@
install = SpackCommand("install")
uninstall = SpackCommand("uninstall")

pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")


@pytest.mark.db
def test_mark_mode_required(mutable_database):

@@ -235,7 +235,7 @@ def test_mirror_destroy(

    # Put a binary package in a buildcache
    install("--no-cache", spec_name)
-   buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, spec_name)
+   buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, spec_name)

    contents = os.listdir(mirror_dir.strpath)
    assert "build_cache" in contents

@@ -245,7 +245,7 @@ def test_mirror_destroy(

    assert not os.path.exists(mirror_dir.strpath)

-   buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, spec_name)
+   buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, spec_name)

    contents = os.listdir(mirror_dir.strpath)
    assert "build_cache" in contents

@@ -157,7 +157,7 @@ def _parse_types(string):


def test_spec_deptypes_nodes():
-   output = spec("--types", "--cover", "nodes", "dt-diamond")
+   output = spec("--types", "--cover", "nodes", "--no-install-status", "dt-diamond")
    types = _parse_types(output)

    assert types["dt-diamond"] == [" "]

@@ -167,7 +167,7 @@ def test_spec_deptypes_nodes():


def test_spec_deptypes_edges():
-   output = spec("--types", "--cover", "edges", "dt-diamond")
+   output = spec("--types", "--cover", "edges", "--no-install-status", "dt-diamond")
    types = _parse_types(output)

    assert types["dt-diamond"] == [" "]

@@ -337,8 +337,6 @@ def test_compiler_flags_differ_identical_compilers(self):

        # Get the compiler that matches the spec (
        compiler = spack.compilers.compiler_for_spec("clang@=12.2.0", spec.architecture)
-       # Clear cache for compiler config since it has its own cache mechanism outside of config
-       spack.compilers._cache_config_file = []

        # Configure spack to have two identical compilers with different flags
        default_dict = spack.compilers._to_dict(compiler)

@@ -2137,7 +2135,7 @@ def test_compiler_with_custom_non_numeric_version(self, mock_executable):
        {
            "compiler": {
                "spec": "gcc@foo",
-               "paths": {"cc": gcc_path, "cxx": gcc_path, "f77": None, "fc": None},
+               "paths": {"cc": str(gcc_path), "cxx": str(gcc_path), "f77": None, "fc": None},
                "operating_system": "debian6",
                "modules": [],
            }

@@ -2172,3 +2170,14 @@ def test_concretization_with_compilers_supporting_target_any(self):
        with spack.config.override("compilers", compiler_configuration):
            s = spack.spec.Spec("a").concretized()
        assert s.satisfies("%gcc@12.1.0")

+   @pytest.mark.parametrize("spec_str", ["mpileaks", "mpileaks ^mpich"])
+   def test_virtuals_are_annotated_on_edges(self, spec_str, default_mock_concretization):
+       """Tests that information on virtuals is annotated on DAG edges"""
+       spec = default_mock_concretization(spec_str)
+       mpi_provider = spec["mpi"].name
+
+       edges = spec.edges_to_dependencies(name=mpi_provider)
+       assert len(edges) == 1 and edges[0].virtuals == ("mpi",)
+       edges = spec.edges_to_dependencies(name="callpath")
+       assert len(edges) == 1 and edges[0].virtuals == ()
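For illustration, a minimal sketch of the edge annotation this new test exercises; it assumes a working concretization environment and is not itself part of the diff:

    import spack.spec

    spec = spack.spec.Spec("mpileaks").concretized()
    for edge in spec.edges_to_dependencies():
        # edge.virtuals is a tuple such as ("mpi",) when the dependency provides
        # a virtual package along this edge, and () otherwise
        print(edge.spec.name, edge.virtuals)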
@@ -1669,22 +1669,21 @@ def clear_directive_functions():


@pytest.fixture
-def mock_executable(tmpdir):
+def mock_executable(tmp_path):
    """Factory to create a mock executable in a temporary directory that
    outputs a custom string when run.
    """
-   import jinja2
-
    shebang = "#!/bin/sh\n" if sys.platform != "win32" else "@ECHO OFF"

    def _factory(name, output, subdir=("bin",)):
-       f = tmpdir.ensure(*subdir, dir=True).join(name)
+       executable_dir = tmp_path.joinpath(*subdir)
+       executable_dir.mkdir(parents=True, exist_ok=True)
+       executable_path = executable_dir / name
        if sys.platform == "win32":
-           f += ".bat"
-       t = jinja2.Template("{{ shebang }}{{ output }}\n")
-       f.write(t.render(shebang=shebang, output=output))
-       f.chmod(0o755)
-       return str(f)
+           executable_path = executable_dir / (name + ".bat")
+       executable_path.write_text(f"{ shebang }{ output }\n")
+       executable_path.chmod(0o755)
+       return executable_path

    return _factory
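With the fixture ported from py.path's tmpdir to pytest's tmp_path, the factory now returns a pathlib.Path rather than a string, which is why call sites earlier in this diff wrap results in str(). A hypothetical usage sketch (test name and output are illustrative):

    def test_detects_mock_gcc(mock_executable):
        gcc_path = mock_executable("gcc", output="echo gcc 4.2.1")  # pathlib.Path
        assert gcc_path.exists()
        # anything keyed or compared as a string must convert explicitly
        assert gcc_path.name in str(gcc_path)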
@@ -1921,3 +1920,21 @@ def _func(spec_str, tests=False):
        return concretized_specs_cache[key].copy()

    return _func


+@pytest.fixture
+def shell_as(shell):
+   if sys.platform != "win32":
+       yield
+       return
+   if shell not in ("pwsh", "bat"):
+       raise RuntimeError("Shell must be one of supported Windows shells (pwsh|bat)")
+   try:
+       # fetch and store old shell type
+       _shell = os.environ.get("SPACK_SHELL", None)
+       os.environ["SPACK_SHELL"] = shell
+       yield
+   finally:
+       # restore old shell if one was set
+       if _shell:
+           os.environ["SPACK_SHELL"] = _shell
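The fixture is consumed together with a parametrized `shell` fixture, as the environment-util test later in this diff does. A trimmed sketch under that assumption:

    import sys

    import pytest

    @pytest.mark.parametrize("shell", ["pwsh", "bat"] if sys.platform == "win32" else ["bash"])
    def test_shell_aware_behavior(shell_as, shell):
        # on Windows, SPACK_SHELL is set to `shell` for the duration of the test
        # and restored afterwards; on other platforms the fixture is a no-op
        ...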
lib/spack/spack/test/data/sourceme_modules.sh (new file, 17 lines)

@@ -0,0 +1,17 @@
#!/usr/bin/env bash
#
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

_module_raw() { return 1; };
module() { return 1; };
ml() { return 1; };
export -f _module_raw;
export -f module;
export -f ml;

export MODULES_AUTO_HANDLING=1
export __MODULES_LMCONFLICT=bar&foo
export NEW_VAR=new

lib/spack/spack/test/data/specfiles/hdf5.v020.json.gz (new binary file, not shown)
@@ -400,7 +400,7 @@ def test_sanitize_literals(env, exclude, include):
        ({"SHLVL": "1"}, ["SH.*"], [], [], ["SHLVL"]),
        # Check we can include using a regex
        ({"SHLVL": "1"}, ["SH.*"], ["SH.*"], ["SHLVL"], []),
-       # Check regex to exclude Modules v4 related vars
+       # Check regex to exclude Environment Modules related vars
        (
            {"MODULES_LMALTNAME": "1", "MODULES_LMCONFLICT": "2"},
            ["MODULES_(.*)"],

@@ -415,6 +415,13 @@ def test_sanitize_literals(env, exclude, include):
            [],
            ["A_modquar", "b_modquar", "C_modshare"],
        ),
+       (
+           {"__MODULES_LMTAG": "1", "__MODULES_LMPREREQ": "2"},
+           ["__MODULES_(.*)"],
+           [],
+           [],
+           ["__MODULES_LMTAG", "__MODULES_LMPREREQ"],
+       ),
    ],
)
def test_sanitize_regex(env, exclude, include, expected, deleted):

@@ -489,3 +496,19 @@ def test_exclude_lmod_variables():
    # Check that variables related to lmod are not in there
    modifications = env.group_by_name()
    assert not any(x.startswith("LMOD_") for x in modifications)


+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+@pytest.mark.regression("13504")
+def test_exclude_modules_variables():
+   # Construct the list of environment modifications
+   file = os.path.join(datadir, "sourceme_modules.sh")
+   env = EnvironmentModifications.from_sourcing_file(file)
+
+   # Check that variables related to modules are not in there
+   modifications = env.group_by_name()
+   assert not any(x.startswith("MODULES_") for x in modifications)
+   assert not any(x.startswith("__MODULES_") for x in modifications)
+   assert not any(x.startswith("BASH_FUNC_ml") for x in modifications)
+   assert not any(x.startswith("BASH_FUNC_module") for x in modifications)
+   assert not any(x.startswith("BASH_FUNC__module_raw") for x in modifications)
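For illustration, a minimal sketch of the API under test (the script path is an assumption):

    from spack.util.environment import EnvironmentModifications

    env = EnvironmentModifications.from_sourcing_file("/path/to/setup.sh")
    mods = env.group_by_name()   # variable name -> list of modifications
    env.apply_modifications()    # replay the surviving changes into os.environ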
@@ -167,6 +167,46 @@ def test_prepend_path_separator(self, modulefile_content, module_configuration):
        assert len([x for x in content if 'append_path("SPACE", "qux", " ")' in x]) == 1
        assert len([x for x in content if 'remove_path("SPACE", "qux", " ")' in x]) == 1

+   @pytest.mark.regression("11355")
+   def test_manpath_setup(self, modulefile_content, module_configuration):
+       """Tests specific setup of the MANPATH environment variable."""
+
+       module_configuration("autoload_direct")
+
+       # no manpath set by module
+       content = modulefile_content("mpileaks")
+       assert len([x for x in content if 'append_path("MANPATH", "", ":")' in x]) == 0
+
+       # manpath set by module with prepend_path
+       content = modulefile_content("module-manpath-prepend")
+       assert (
+           len([x for x in content if 'prepend_path("MANPATH", "/path/to/man", ":")' in x]) == 1
+       )
+       assert (
+           len([x for x in content if 'prepend_path("MANPATH", "/path/to/share/man", ":")' in x])
+           == 1
+       )
+       assert len([x for x in content if 'append_path("MANPATH", "", ":")' in x]) == 1
+
+       # manpath set by module with append_path
+       content = modulefile_content("module-manpath-append")
+       assert len([x for x in content if 'append_path("MANPATH", "/path/to/man", ":")' in x]) == 1
+       assert len([x for x in content if 'append_path("MANPATH", "", ":")' in x]) == 1
+
+       # manpath set by module with setenv
+       content = modulefile_content("module-manpath-setenv")
+       assert len([x for x in content if 'setenv("MANPATH", "/path/to/man")' in x]) == 1
+       assert len([x for x in content if 'append_path("MANPATH", "", ":")' in x]) == 0
+
+   @pytest.mark.regression("29578")
+   def test_setenv_raw_value(self, modulefile_content, module_configuration):
+       """Tests that we can set an environment variable value without formatting it."""
+
+       module_configuration("autoload_direct")
+       content = modulefile_content("module-setenv-raw")
+
+       assert len([x for x in content if 'setenv("FOO", "{{name}}, {name}, {{}}, {}")' in x]) == 1

    def test_help_message(self, modulefile_content, module_configuration):
        """Tests the generation of module help message."""

@@ -37,6 +37,11 @@ def test_autoload_direct(self, modulefile_content, module_configuration):
        module_configuration("autoload_direct")
        content = modulefile_content(mpileaks_spec_string)

+       assert (
+           len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
+           == 1
+       )
+       assert len([x for x in content if "depends-on " in x]) == 2
        assert len([x for x in content if "module load " in x]) == 2

        # dtbuild1 has

@@ -46,6 +51,11 @@ def test_autoload_direct(self, modulefile_content, module_configuration):
        # Just make sure the 'build' dependency is not there
        content = modulefile_content("dtbuild1")

+       assert (
+           len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
+           == 1
+       )
+       assert len([x for x in content if "depends-on " in x]) == 2
        assert len([x for x in content if "module load " in x]) == 2

        # The configuration file sets the verbose keyword to False

@@ -58,6 +68,11 @@ def test_autoload_all(self, modulefile_content, module_configuration):
        module_configuration("autoload_all")
        content = modulefile_content(mpileaks_spec_string)

+       assert (
+           len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
+           == 1
+       )
+       assert len([x for x in content if "depends-on " in x]) == 5
        assert len([x for x in content if "module load " in x]) == 5

        # dtbuild1 has

@@ -67,6 +82,11 @@ def test_autoload_all(self, modulefile_content, module_configuration):
        # Just make sure the 'build' dependency is not there
        content = modulefile_content("dtbuild1")

+       assert (
+           len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
+           == 1
+       )
+       assert len([x for x in content if "depends-on " in x]) == 2
        assert len([x for x in content if "module load " in x]) == 2

    def test_prerequisites_direct(self, modulefile_content, module_configuration):

@@ -103,6 +123,7 @@ def test_alter_environment(self, modulefile_content, module_configuration):
        assert len([x for x in content if x.startswith("prepend-path CMAKE_PREFIX_PATH")]) == 0
        assert len([x for x in content if 'setenv FOO "foo"' in x]) == 0
        assert len([x for x in content if "unsetenv BAR" in x]) == 0
+       assert len([x for x in content if "depends-on foo/bar" in x]) == 1
        assert len([x for x in content if "module load foo/bar" in x]) == 1
        assert len([x for x in content if "setenv LIBDWARF_ROOT" in x]) == 1

@@ -121,6 +142,55 @@ def test_prepend_path_separator(self, modulefile_content, module_configuration):
        assert len([x for x in content if 'append-path --delim " " SPACE "qux"' in x]) == 1
        assert len([x for x in content if 'remove-path --delim " " SPACE "qux"' in x]) == 1

+   @pytest.mark.regression("11355")
+   def test_manpath_setup(self, modulefile_content, module_configuration):
+       """Tests specific setup of the MANPATH environment variable."""
+
+       module_configuration("autoload_direct")
+
+       # no manpath set by module
+       content = modulefile_content("mpileaks")
+       assert len([x for x in content if 'append-path --delim ":" MANPATH ""' in x]) == 0
+
+       # manpath set by module with prepend-path
+       content = modulefile_content("module-manpath-prepend")
+       assert (
+           len([x for x in content if 'prepend-path --delim ":" MANPATH "/path/to/man"' in x])
+           == 1
+       )
+       assert (
+           len(
+               [
+                   x
+                   for x in content
+                   if 'prepend-path --delim ":" MANPATH "/path/to/share/man"' in x
+               ]
+           )
+           == 1
+       )
+       assert len([x for x in content if 'append-path --delim ":" MANPATH ""' in x]) == 1
+
+       # manpath set by module with append-path
+       content = modulefile_content("module-manpath-append")
+       assert (
+           len([x for x in content if 'append-path --delim ":" MANPATH "/path/to/man"' in x]) == 1
+       )
+       assert len([x for x in content if 'append-path --delim ":" MANPATH ""' in x]) == 1
+
+       # manpath set by module with setenv
+       content = modulefile_content("module-manpath-setenv")
+       assert len([x for x in content if 'setenv MANPATH "/path/to/man"' in x]) == 1
+       assert len([x for x in content if 'append-path --delim ":" MANPATH ""' in x]) == 0
+
+   @pytest.mark.regression("29578")
+   def test_setenv_raw_value(self, modulefile_content, module_configuration):
+       """Tests that we can set an environment variable value without formatting it."""
+
+       module_configuration("autoload_direct")
+       content = modulefile_content("module-setenv-raw")
+
+       assert len([x for x in content if 'setenv FOO "{{name}}, {name}, {{}}, {}"' in x]) == 1

    def test_help_message(self, modulefile_content, module_configuration):
        """Tests the generation of module help message."""

@@ -394,10 +464,16 @@ def test_autoload_with_constraints(self, modulefile_content, module_configuratio

        # Test the mpileaks that should have the autoloaded dependencies
        content = modulefile_content("mpileaks ^mpich2")
+       assert len([x for x in content if "depends-on " in x]) == 2
        assert len([x for x in content if "module load " in x]) == 2

        # Test the mpileaks that should NOT have the autoloaded dependencies
        content = modulefile_content("mpileaks ^mpich")
+       assert (
+           len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
+           == 0
+       )
+       assert len([x for x in content if "depends-on " in x]) == 0
        assert len([x for x in content if "module load " in x]) == 0

    def test_modules_no_arch(self, factory, module_configuration):
@@ -100,7 +100,7 @@ def test_buildcache(mock_archive, tmpdir):
    parser = argparse.ArgumentParser()
    buildcache.setup_parser(parser)

-   create_args = ["create", "-a", "-f", "-d", mirror_path, pkghash]
+   create_args = ["create", "-a", "-f", mirror_path, pkghash]
    # Create a private key to sign package with if gpg2 available
    spack.util.gpg.create(
        name="test key 1", expires="0", email="spack@googlegroups.com", comment="Spack test key"

@@ -116,7 +116,7 @@ def test_buildcache(mock_archive, tmpdir):
    # Uninstall the package
    pkg.do_uninstall(force=True)

-   install_args = ["install", "-a", "-f", pkghash]
+   install_args = ["install", "-f", pkghash]
    args = parser.parse_args(install_args)
    # Test install
    buildcache.buildcache(parser, args)

@@ -131,30 +131,6 @@ def test_buildcache(mock_archive, tmpdir):
    assert buildinfo["relocate_textfiles"] == ["dummy.txt"]
    assert buildinfo["relocate_links"] == ["link_to_dummy.txt"]

-   # create build cache with relative path
-   create_args.insert(create_args.index("-a"), "-f")
-   create_args.insert(create_args.index("-a"), "-r")
-   args = parser.parse_args(create_args)
-   buildcache.buildcache(parser, args)
-
-   # Uninstall the package
-   pkg.do_uninstall(force=True)
-
-   args = parser.parse_args(install_args)
-   buildcache.buildcache(parser, args)
-
-   # test overwrite install
-   install_args.insert(install_args.index("-a"), "-f")
-   args = parser.parse_args(install_args)
-   buildcache.buildcache(parser, args)
-
-   files = os.listdir(spec.prefix)
-   assert "link_to_dummy.txt" in files
-   assert "dummy.txt" in files
-   # assert os.path.realpath(
-   #     os.path.join(spec.prefix, 'link_to_dummy.txt')
-   # ) == os.path.realpath(os.path.join(spec.prefix, 'dummy.txt'))
-
    args = parser.parse_args(["keys"])
    buildcache.buildcache(parser, args)
@@ -125,7 +125,7 @@ def _mock_installed(self):
    # use the installed C. It should *not* force A to use the installed D
    # *if* we're doing a fresh installation.
    a_spec = Spec(a)
-   a_spec._add_dependency(c_spec, deptypes=("build", "link"))
+   a_spec._add_dependency(c_spec, deptypes=("build", "link"), virtuals=())
    a_spec.concretize()
    assert spack.version.Version("2") == a_spec[c][d].version
    assert spack.version.Version("2") == a_spec[e].version

@@ -148,7 +148,7 @@ def test_specify_preinstalled_dep(tmpdir, monkeypatch):
    monkeypatch.setattr(Spec, "installed", property(lambda x: x.name != "a"))

    a_spec = Spec("a")
-   a_spec._add_dependency(b_spec, deptypes=("build", "link"))
+   a_spec._add_dependency(b_spec, deptypes=("build", "link"), virtuals=())
    a_spec.concretize()

    assert set(x.name for x in a_spec.traverse()) == set(["a", "b", "c"])

@@ -989,9 +989,9 @@ def test_synthetic_construction_of_split_dependencies_from_same_package(mock_pac
    link_run_spec = Spec("c@=1.0").concretized()
    build_spec = Spec("c@=2.0").concretized()

-   root.add_dependency_edge(link_run_spec, deptypes="link")
-   root.add_dependency_edge(link_run_spec, deptypes="run")
-   root.add_dependency_edge(build_spec, deptypes="build")
+   root.add_dependency_edge(link_run_spec, deptypes="link", virtuals=())
+   root.add_dependency_edge(link_run_spec, deptypes="run", virtuals=())
+   root.add_dependency_edge(build_spec, deptypes="build", virtuals=())

    # Check dependencies from the perspective of root
    assert len(root.dependencies()) == 2

@@ -1017,7 +1017,7 @@ def test_synthetic_construction_bootstrapping(mock_packages, config):
    root = Spec("b@=2.0").concretized()
    bootstrap = Spec("b@=1.0").concretized()

-   root.add_dependency_edge(bootstrap, deptypes="build")
+   root.add_dependency_edge(bootstrap, deptypes="build", virtuals=())

    assert len(root.dependencies()) == 1
    assert root.dependencies()[0].name == "b"

@@ -1036,7 +1036,7 @@ def test_addition_of_different_deptypes_in_multiple_calls(mock_packages, config)
    bootstrap = Spec("b@=1.0").concretized()

    for current_deptype in ("build", "link", "run"):
-       root.add_dependency_edge(bootstrap, deptypes=current_deptype)
+       root.add_dependency_edge(bootstrap, deptypes=current_deptype, virtuals=())

    # Check edges in dependencies
    assert len(root.edges_to_dependencies()) == 1

@@ -1063,9 +1063,9 @@ def test_adding_same_deptype_with_the_same_name_raises(
    c1 = Spec("b@=1.0").concretized()
    c2 = Spec("b@=2.0").concretized()

-   p.add_dependency_edge(c1, deptypes=c1_deptypes)
+   p.add_dependency_edge(c1, deptypes=c1_deptypes, virtuals=())
    with pytest.raises(spack.error.SpackError):
-       p.add_dependency_edge(c2, deptypes=c2_deptypes)
+       p.add_dependency_edge(c2, deptypes=c2_deptypes, virtuals=())


@pytest.mark.regression("33499")

@@ -1084,16 +1084,16 @@ def test_indexing_prefers_direct_or_transitive_link_deps():
    z3_flavor_1 = Spec("z3 +through_a1")
    z3_flavor_2 = Spec("z3 +through_z1")

-   root.add_dependency_edge(a1, deptypes=("build", "run", "test"))
+   root.add_dependency_edge(a1, deptypes=("build", "run", "test"), virtuals=())

    # unique package as a dep of a build/run/test type dep.
-   a1.add_dependency_edge(a2, deptypes="all")
-   a1.add_dependency_edge(z3_flavor_1, deptypes="all")
+   a1.add_dependency_edge(a2, deptypes="all", virtuals=())
+   a1.add_dependency_edge(z3_flavor_1, deptypes="all", virtuals=())

    # chain of link type deps root -> z1 -> z2 -> z3
-   root.add_dependency_edge(z1, deptypes="link")
-   z1.add_dependency_edge(z2, deptypes="link")
-   z2.add_dependency_edge(z3_flavor_2, deptypes="link")
+   root.add_dependency_edge(z1, deptypes="link", virtuals=())
+   z1.add_dependency_edge(z2, deptypes="link", virtuals=())
+   z2.add_dependency_edge(z3_flavor_2, deptypes="link", virtuals=())

    # Indexing should prefer the link-type dep.
    assert "through_z1" in root["z3"].variants
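As these hunks show, `virtuals` is now a required keyword on `add_dependency_edge` (and on the private `_add_dependency`): pass `()` for a plain edge, or the virtual names the dependency provides along that edge. A short sketch using the same mock packages as the tests above:

    from spack.spec import Spec

    root = Spec("b@=2.0").concretized()
    dep = Spec("b@=1.0").concretized()
    root.add_dependency_edge(dep, deptypes="build", virtuals=())
    assert root.edges_to_dependencies()[0].virtuals == ()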
@@ -971,7 +971,7 @@ def test_error_message_unknown_variant(self):
    def test_satisfies_dependencies_ordered(self):
        d = Spec("zmpi ^fake")
        s = Spec("mpileaks")
-       s._add_dependency(d, deptypes=())
+       s._add_dependency(d, deptypes=(), virtuals=())
        assert s.satisfies("mpileaks ^zmpi ^fake")

    @pytest.mark.parametrize("transitive", [True, False])

@@ -1018,6 +1018,7 @@ def test_is_extension_after_round_trip_to_dict(config, mock_packages, spec_str):


def test_malformed_spec_dict():
+   # FIXME: This test was really testing the specific implementation with an ad-hoc test
    with pytest.raises(SpecError, match="malformed"):
        Spec.from_dict(
            {"spec": {"_meta": {"version": 2}, "nodes": [{"dependencies": {"name": "foo"}}]}}

@@ -1025,6 +1026,7 @@ def test_malformed_spec_dict():


def test_spec_dict_hashless_dep():
+   # FIXME: This test was really testing the specific implementation with an ad-hoc test
    with pytest.raises(SpecError, match="Couldn't parse"):
        Spec.from_dict(
            {

@@ -1118,7 +1120,7 @@ def test_concretize_partial_old_dag_hash_spec(mock_packages, config):

    # add it to an abstract spec as a dependency
    top = Spec("dt-diamond")
-   top.add_dependency_edge(bottom, deptypes=())
+   top.add_dependency_edge(bottom, deptypes=(), virtuals=())

    # concretize with the already-concrete dependency
    top.concretize()

@@ -43,12 +43,6 @@ def check_json_round_trip(spec):
    assert spec.eq_dag(spec_from_json)


-def test_simple_spec():
-   spec = Spec("mpileaks")
-   check_yaml_round_trip(spec)
-   check_json_round_trip(spec)
-
-
def test_read_spec_from_signed_json():
    spec_dir = os.path.join(spack.paths.test_path, "data", "mirrors", "signed_json")
    file_name = (

@@ -70,13 +64,6 @@ def check_spec(spec_to_check):
        check_spec(s)


-def test_normal_spec(mock_packages):
-   spec = Spec("mpileaks+debug~opt")
-   spec.normalize()
-   check_yaml_round_trip(spec)
-   check_json_round_trip(spec)
-
-
@pytest.mark.parametrize(
    "invalid_yaml", ["playing_playlist: {{ action }} playlist {{ playlist_name }}"]
)

@@ -95,37 +82,28 @@ def test_invalid_json_spec(invalid_json, error_message):
    assert error_message in exc_msg


-def test_external_spec(config, mock_packages):
-   spec = Spec("externaltool")
-   spec.concretize()
-   check_yaml_round_trip(spec)
-   check_json_round_trip(spec)
-
-   spec = Spec("externaltest")
-   spec.concretize()
-   check_yaml_round_trip(spec)
-   check_json_round_trip(spec)
-
-
-def test_ambiguous_version_spec(mock_packages):
-   spec = Spec("mpileaks@1.0:5.0,6.1,7.3+debug~opt")
-   spec.normalize()
-   check_yaml_round_trip(spec)
-   check_json_round_trip(spec)
-
-
-def test_concrete_spec(config, mock_packages):
-   spec = Spec("mpileaks+debug~opt")
-   spec.concretize()
-   check_yaml_round_trip(spec)
-   check_json_round_trip(spec)
-
-
-def test_yaml_multivalue(config, mock_packages):
-   spec = Spec('multivalue-variant foo="bar,baz"')
-   spec.concretize()
-   check_yaml_round_trip(spec)
-   check_json_round_trip(spec)
+@pytest.mark.parametrize(
+   "abstract_spec",
+   [
+       # Externals
+       "externaltool",
+       "externaltest",
+       # Ambiguous version spec
+       "mpileaks@1.0:5.0,6.1,7.3+debug~opt",
+       # Variants
+       "mpileaks+debug~opt",
+       'multivalue-variant foo="bar,baz"',
+       # Virtuals on edges
+       "callpath",
+       "mpileaks",
+   ],
+)
+def test_roundtrip_concrete_specs(abstract_spec, default_mock_concretization):
+   check_yaml_round_trip(Spec(abstract_spec))
+   check_json_round_trip(Spec(abstract_spec))
+   concrete_spec = default_mock_concretization(abstract_spec)
+   check_yaml_round_trip(concrete_spec)
+   check_json_round_trip(concrete_spec)


def test_yaml_subdag(config, mock_packages):

@@ -506,6 +484,8 @@ def test_legacy_yaml(tmpdir, install_mockery, mock_packages):
        ("specfiles/hdf5.v017.json.gz", "xqh5iyjjtrp2jw632cchacn3l7vqzf3m", spack.spec.SpecfileV2),
        # Use "full hash" everywhere, see https://github.com/spack/spack/pull/28504
        ("specfiles/hdf5.v019.json.gz", "iulacrbz7o5v5sbj7njbkyank3juh6d3", spack.spec.SpecfileV3),
+       # Add properties on edges, see https://github.com/spack/spack/pull/34821
+       ("specfiles/hdf5.v020.json.gz", "vlirlcgazhvsvtundz4kug75xkkqqgou", spack.spec.SpecfileV4),
    ],
)
def test_load_json_specfiles(specfile, expected_hash, reader_cls):
@@ -19,7 +19,7 @@ def create_dag(nodes, edges):
    """
    specs = {name: Spec(name) for name in nodes}
    for parent, child, deptypes in edges:
-       specs[parent].add_dependency_edge(specs[child], deptypes=deptypes)
+       specs[parent].add_dependency_edge(specs[child], deptypes=deptypes, virtuals=())
    return specs


@@ -113,13 +113,16 @@ def test_path_put_first(prepare_environment_for_tests):
    assert envutil.get_path("TEST_ENV_VAR") == expected


-def test_dump_environment(prepare_environment_for_tests, tmpdir):
+@pytest.mark.parametrize("shell", ["pwsh", "bat"] if sys.platform == "win32" else ["bash"])
+def test_dump_environment(prepare_environment_for_tests, shell_as, shell, tmpdir):
    test_paths = "/a:/b/x:/b/c"
    os.environ["TEST_ENV_VAR"] = test_paths
    dumpfile_path = str(tmpdir.join("envdump.txt"))
    envutil.dump_environment(dumpfile_path)
    with open(dumpfile_path, "r") as dumpfile:
+       if sys.platform == "win32":
+           if shell == "pwsh":
+               assert "$Env:TEST_ENV_VAR={}\n".format(test_paths) in list(dumpfile)
+           elif shell == "bat":
+               assert 'set "TEST_ENV_VAR={}"\n'.format(test_paths) in list(dumpfile)
+       else:
            assert "TEST_ENV_VAR={0}; export TEST_ENV_VAR\n".format(test_paths) in list(dumpfile)

@@ -164,11 +167,14 @@ def test_escape_double_quotes_in_shell_modifications():

    to_validate.set("QUOTED_VAR", '"MY_VAL"')

-   cmds = to_validate.shell_modifications()
-
-   if sys.platform != "win32":
+   if sys.platform == "win32":
+       cmds = to_validate.shell_modifications(shell="bat")
+       assert r'set "VAR=$PATH;$ANOTHER_PATH"' in cmds
+       assert r'set "QUOTED_VAR="MY_VAL"' in cmds
+       cmds = to_validate.shell_modifications(shell="pwsh")
+       assert r"$Env:VAR=$PATH;$ANOTHER_PATH" in cmds
+       assert r'$Env:QUOTED_VAR="MY_VAL"' in cmds
+   else:
+       cmds = to_validate.shell_modifications()
        assert 'export VAR="$PATH:$ANOTHER_PATH"' in cmds
        assert r'export QUOTED_VAR="\"MY_VAL\""' in cmds
-   else:
-       assert "export VAR=$PATH;$ANOTHER_PATH" in cmds
-       assert r'export QUOTED_VAR="MY_VAL"' in cmds
@@ -337,15 +337,15 @@ def test_remove_complex_package_logic_filtered():
        ("grads", "rrlmwml3f2frdnqavmro3ias66h5b2ce"),
        ("llvm", "nufffum5dabmaf4l5tpfcblnbfjknvd3"),
        # has @when("@4.1.0") and raw unicode literals
-       ("mfem", "qtneutm6khd6epd2rhyuv2y6zavsxbed"),
-       ("mfem@4.0.0", "qtneutm6khd6epd2rhyuv2y6zavsxbed"),
-       ("mfem@4.1.0", "uit2ydzhra3b2mlvnq262qlrqqmuwq3d"),
+       ("mfem", "lbhr43gm5zdye2yhqznucxb4sg6vhryl"),
+       ("mfem@4.0.0", "lbhr43gm5zdye2yhqznucxb4sg6vhryl"),
+       ("mfem@4.1.0", "vjdjdgjt6nyo7ited2seki5epggw5gza"),
        # has @when("@1.5.0:")
        ("py-torch", "qs7djgqn7dy7r3ps4g7hv2pjvjk4qkhd"),
        ("py-torch@1.0", "qs7djgqn7dy7r3ps4g7hv2pjvjk4qkhd"),
        ("py-torch@1.6", "p4ine4hc6f2ik2f2wyuwieslqbozll5w"),
        # has a print with multiple arguments
-       ("legion", "sffy6vz3dusxnxeetofoomlaieukygoj"),
+       ("legion", "efpfd2c4pzhsbyc3o7plqcmtwm6b57yh"),
        # has nested `with when()` blocks and loops
        ("trilinos", "vqrgscjrla4hi7bllink7v6v6dwxgc2p"),
    ],

@@ -935,7 +935,7 @@ def test_inclusion_upperbound():

@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_git_version_repo_attached_after_serialization(
-   mock_git_version_info, mock_packages, monkeypatch
+   mock_git_version_info, mock_packages, config, monkeypatch
):
    """Test that a GitVersion instance can be serialized and deserialized
    without losing its repository reference.

@@ -954,7 +954,9 @@ def test_git_version_repo_attached_after_serialization(

@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
-def test_resolved_git_version_is_shown_in_str(mock_git_version_info, mock_packages, monkeypatch):
+def test_resolved_git_version_is_shown_in_str(
+   mock_git_version_info, mock_packages, config, monkeypatch
+):
    """Test that a GitVersion from a commit without a user supplied version is printed
    as <hash>=<version>, and not just <hash>."""
    repo_path, _, commits = mock_git_version_info

@@ -968,7 +970,7 @@ def test_resolved_git_version_is_shown_in_str(mock_git_version_info, mock_packag
    assert str(spec.version) == f"{commit}=1.0-git.1"


-def test_unresolvable_git_versions_error(mock_packages):
+def test_unresolvable_git_versions_error(config, mock_packages):
    """Test that VersionLookupError is raised when a git prop is not set on a package."""
    with pytest.raises(VersionLookupError):
        # The package exists, but does not have a git property set. When dereferencing
@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import os
-import sys

import pytest

@@ -34,7 +33,6 @@ def _create_url(relative_url):
root_with_fragment = _create_url("index_with_fragment.html")


-@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
@pytest.mark.parametrize(
    "depth,expected_found,expected_not_found,expected_text",
    [

@@ -99,20 +97,17 @@ def test_spider_no_response(monkeypatch):
    assert not pages and not links


-@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_0():
    versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=0)
    assert Version("0.0.0") in versions


-@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_1():
    versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=1)
    assert Version("0.0.0") in versions
    assert Version("1.0.0") in versions


-@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_2():
    versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=2)
    assert Version("0.0.0") in versions

@@ -120,14 +115,12 @@ def test_find_versions_of_archive_2():
    assert Version("2.0.0") in versions


-@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_exotic_versions_of_archive_2():
    versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=2)
    # up for grabs to make this better.
    assert Version("2.0.0b2") in versions


-@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_3():
    versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=3)
    assert Version("0.0.0") in versions

@@ -137,7 +130,6 @@ def test_find_versions_of_archive_3():
    assert Version("4.5") in versions


-@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_exotic_versions_of_archive_3():
    versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=3)
    assert Version("2.0.0b2") in versions

@@ -145,7 +137,6 @@ def test_find_exotic_versions_of_archive_3():
    assert Version("4.5-rc5") in versions


-@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_with_fragment():
    versions = spack.util.web.find_versions_of_archive(
        root_tarball, root_with_fragment, list_depth=0

@@ -206,7 +197,6 @@ def test_etag_parser():
    assert spack.util.web.parse_etag("abc def") is None


-@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_list_url(tmpdir):
    testpath = str(tmpdir)
    testpath_url = url_util.path_to_file_url(testpath)
@@ -211,7 +211,9 @@ def get_visitor_from_args(cover, direction, deptype, key=id, visited=None, visit
def with_artificial_edges(specs):
    """Initialize a list of edges from an imaginary root node to the root specs."""
    return [
-       EdgeAndDepth(edge=spack.spec.DependencySpec(parent=None, spec=s, deptypes=()), depth=0)
+       EdgeAndDepth(
+           edge=spack.spec.DependencySpec(parent=None, spec=s, deptypes=(), virtuals=()), depth=0
+       )
        for s in specs
    ]
@@ -47,6 +47,7 @@
    "csh": "setenv {0} {1};\n",
    "fish": "set -gx {0} {1};\n",
    "bat": 'set "{0}={1}"\n',
+   "pwsh": "$Env:{0}={1}\n",
}

@@ -55,6 +56,7 @@
    "csh": "unsetenv {0};\n",
    "fish": "set -e {0};\n",
    "bat": 'set "{0}="\n',
+   "pwsh": "Remove-Item Env:{0}\n",
}

@@ -172,7 +174,9 @@ def path_put_first(var_name: str, directories: List[Path]):


def _win_env_var_to_set_line(var: str, val: str) -> str:
-   return f'set "{var}={val}"'
+   is_pwsh = os.environ.get("SPACK_SHELL", None) == "pwsh"
+   env_set_phrase = f"$Env:{var}={val}" if is_pwsh else f'set "{var}={val}"'
+   return env_set_phrase
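A minimal sketch of the new dispatch, assuming SPACK_SHELL is the only selector consulted (as the hunk above suggests):

    import os

    from spack.util.environment import _win_env_var_to_set_line

    os.environ["SPACK_SHELL"] = "pwsh"
    assert _win_env_var_to_set_line("FOO", "1") == "$Env:FOO=1"

    os.environ["SPACK_SHELL"] = "bat"
    assert _win_env_var_to_set_line("FOO", "1") == 'set "FOO=1"'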
def _nix_env_var_to_source_line(var: str, val: str) -> str:

@@ -351,13 +355,20 @@ def execute(self, env: MutableMapping[str, str]):


class SetEnv(NameValueModifier):
-   __slots__ = ("force",)
+   __slots__ = ("force", "raw")

    def __init__(
-       self, name: str, value: str, *, trace: Optional[Trace] = None, force: bool = False
+       self,
+       name: str,
+       value: str,
+       *,
+       trace: Optional[Trace] = None,
+       force: bool = False,
+       raw: bool = False,
    ):
        super().__init__(name, value, trace=trace)
        self.force = force
+       self.raw = raw

    def execute(self, env: MutableMapping[str, str]):
        tty.debug(f"SetEnv: {self.name}={str(self.value)}", level=3)

@@ -501,15 +512,16 @@ def _trace(self) -> Optional[Trace]:
        return Trace(filename=filename, lineno=lineno, context=current_context)

    @system_env_normalize
-   def set(self, name: str, value: str, *, force: bool = False):
+   def set(self, name: str, value: str, *, force: bool = False, raw: bool = False):
        """Stores a request to set an environment variable.

        Args:
            name: name of the environment variable
            value: value of the environment variable
            force: if True, audit will not consider this modification a warning
+           raw: if True, formatting of the value string is skipped
        """
-       item = SetEnv(name, value, trace=self._trace(), force=force)
+       item = SetEnv(name, value, trace=self._trace(), force=force, raw=raw)
        self.env_modifications.append(item)
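A minimal sketch of the new `raw` keyword: with `raw=True` the value is stored verbatim, so `{}`-style placeholders survive into generated module files (this is what the module-setenv-raw tests earlier in the diff assert):

    from spack.util.environment import EnvironmentModifications

    env = EnvironmentModifications()
    env.set("FOO", "{{name}}, {name}, {{}}, {}", raw=True)  # no str.format expansion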
    @system_env_normalize

@@ -685,7 +697,7 @@ def apply_modifications(self, env: Optional[MutableMapping[str, str]] = None):

    def shell_modifications(
        self,
-       shell: str = "sh",
+       shell: str = "sh" if sys.platform != "win32" else os.environ.get("SPACK_SHELL", "bat"),
        explicit: bool = False,
        env: Optional[MutableMapping[str, str]] = None,
    ) -> str:

@@ -768,16 +780,21 @@ def from_sourcing_file(
            "PS1",
            "PS2",
            "ENV",
-           # Environment modules v4
+           # Environment Modules or Lmod
            "LOADEDMODULES",
            "_LMFILES_",
-           "BASH_FUNC_module()",
            "MODULEPATH",
-           "MODULES_(.*)",
-           r"(\w*)_mod(quar|share)",
-           # Lmod configuration
-           r"LMOD_(.*)",
            "MODULERCFILE",
+           "BASH_FUNC_ml()",
+           "BASH_FUNC_module()",
+           # Environment Modules-specific configuration
+           "MODULESHOME",
+           "BASH_FUNC__module_raw()",
+           r"MODULES_(.*)",
+           r"__MODULES_(.*)",
+           r"(\w*)_mod(quar|share)",
+           # Lmod-specific configuration
+           r"LMOD_(.*)",
        ]
    )
@@ -87,28 +87,11 @@ def __init__(self, py_ver_consistent=False, _avoid_backslashes=False):

    Arguments:
        py_ver_consistent (bool): if True, generate unparsed code that is
-           consistent between Python 2.7 and 3.5-3.10.
-
-           Consistency is achieved by:
-           1. Ensuring that *args and **kwargs are always the last arguments,
-              regardless of the python version, because Python 2's AST does not
-              have sufficient information to reconstruct star-arg order.
-           2. Always unparsing print as a function.
-           3. Unparsing Python3 unicode literals the way Python 2 would.
-
-           Without these changes, the same source can generate different code for Python 2
-           and Python 3, depending on subtle AST differences. The first of these two
-           causes this module to behave differently from Python 3.8+'s `ast.unparse()`
-
-           One place where single source will generate an inconsistent AST is with
-           multi-argument print statements, e.g.::
-
-               print("foo", "bar", "baz")
-
-           In Python 2, this prints a tuple; in Python 3, it is the print function with
-           multiple arguments. Use ``from __future__ import print_function`` to avoid
-           this inconsistency.
+           consistent between Python versions 3.5-3.11.
+
+           For legacy reasons, consistency is achieved by unparsing Python3 unicode literals
+           the way Python 2 would. This preserved Spack package hash consistency during the
+           python2/3 transition.
    """
    self.future_imports = []
    self._indent = 0

@@ -299,61 +282,6 @@ def visit_Exec(self, node):
            self.write(", ")
            self.dispatch(node.locals)

-   def visit_Print(self, node):
-       # Use print function so that python 2 unparsing is consistent with 3
-       if self._py_ver_consistent:
-           self.fill("print")
-           with self.delimit("(", ")"):
-               values = node.values
-
-               # Can't tell print(foo, bar, baz) and print((foo, bar, baz)) apart in
-               # python 2 and 3, so treat them the same to make hashes consistent.
-               # Single-tuple print are rare and unlikely to affect package hashes,
-               # esp. as they likely print to stdout.
-               if len(values) == 1 and isinstance(values[0], ast.Tuple):
-                   values = node.values[0].elts
-
-               do_comma = False
-               for e in values:
-                   if do_comma:
-                       self.write(", ")
-                   else:
-                       do_comma = True
-                   self.dispatch(e)
-
-               if not node.nl:
-                   if do_comma:
-                       self.write(", ")
-                   else:
-                       do_comma = True
-                   self.write("end=''")
-
-               if node.dest:
-                   if do_comma:
-                       self.write(", ")
-                   else:
-                       do_comma = True
-                   self.write("file=")
-                   self.dispatch(node.dest)
-
-       else:
-           # unparse Python 2 print statements
-           self.fill("print ")
-
-           do_comma = False
-           if node.dest:
-               self.write(">>")
-               self.dispatch(node.dest)
-               do_comma = True
-           for e in node.values:
-               if do_comma:
-                   self.write(", ")
-               else:
-                   do_comma = True
-               self.dispatch(e)
-           if not node.nl:
-               self.write(",")
-
    def visit_Global(self, node):
        self.fill("global ")
        interleave(lambda: self.write(", "), self.write, node.names)

@@ -962,65 +890,28 @@ def visit_Call(self, node):
        self.set_precedence(_Precedence.ATOM, node.func)

        args = node.args
-       if self._py_ver_consistent:
-           # make print(a, b, c) and print((a, b, c)) equivalent, since you can't
-           # tell them apart between Python 2 and 3. See _Print() for more details.
-           if getattr(node.func, "id", None) == "print":
-               if len(node.args) == 1 and isinstance(node.args[0], ast.Tuple):
-                   args = node.args[0].elts
-
        self.dispatch(node.func)

        with self.delimit("(", ")"):
            comma = False

-           # starred arguments last in Python 3.5+, for consistency w/earlier versions
-           star_and_kwargs = []
-           move_stars_last = sys.version_info[:2] >= (3, 5)
+           # NOTE: this code is no longer compatible with python versions 2.7:3.4
+           # If you run on python@:3.4, you will see instability in package hashes
+           # across python versions

            for e in args:
-               if move_stars_last and isinstance(e, ast.Starred):
-                   star_and_kwargs.append(e)
+               if comma:
+                   self.write(", ")
                else:
-                   if comma:
-                       self.write(", ")
-                   else:
-                       comma = True
-                   self.dispatch(e)
+                   comma = True
+               self.dispatch(e)

            for e in node.keywords:
-               # starting from Python 3.5 this denotes a kwargs part of the invocation
-               if e.arg is None and move_stars_last:
-                   star_and_kwargs.append(e)
+               if comma:
+                   self.write(", ")
                else:
-                   if comma:
-                       self.write(", ")
-                   else:
-                       comma = True
-                   self.dispatch(e)
-
-           if move_stars_last:
-               for e in star_and_kwargs:
-                   if comma:
-                       self.write(", ")
-                   else:
-                       comma = True
-                   self.dispatch(e)
-
-           if sys.version_info[:2] < (3, 5):
-               if node.starargs:
-                   if comma:
-                       self.write(", ")
-                   else:
-                       comma = True
-                   self.write("*")
-                   self.dispatch(node.starargs)
-               if node.kwargs:
-                   if comma:
-                       self.write(", ")
-                   else:
-                       comma = True
-                   self.write("**")
-                   self.dispatch(node.kwargs)
+                   comma = True
+               self.dispatch(e)

    def visit_Subscript(self, node):
        self.set_precedence(_Precedence.ATOM, node.value)
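A sketch of how the consistent unparser is typically driven, assuming the module's top-level `unparse` entry point (this is how Spack's package hashing uses it, but the call is illustrative rather than taken from this diff):

    import ast

    from spack.util.unparse import unparse

    tree = ast.parse("print('foo', 'bar')")
    source = unparse(tree, py_ver_consistent=True)  # stable text across Python 3.5-3.11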
@@ -17,6 +17,7 @@
import traceback
import urllib.parse
from html.parser import HTMLParser
+from pathlib import Path, PurePosixPath
from urllib.error import URLError
from urllib.request import HTTPSHandler, Request, build_opener

@@ -498,7 +499,8 @@ def list_url(url, recursive=False):

    if local_path:
        if recursive:
-           return list(_iter_local_prefix(local_path))
+           # convert backslash to forward slash as required for URLs
+           return [str(PurePosixPath(Path(p))) for p in list(_iter_local_prefix(local_path))]
        return [
            subpath
            for subpath in os.listdir(local_path)

@@ -738,7 +740,8 @@ def find_versions_of_archive(

    # We'll be a bit more liberal and just look for the archive
    # part, not the full path.
-   url_regex = os.path.basename(url_regex)
+   # this is a URL so it is a posixpath even on Windows
+   url_regex = PurePosixPath(url_regex).name

    # We need to add a / to the beginning of the regex to prevent
    # Spack from picking up similarly named packages like:
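A small sketch of the path-to-URL normalization used above (the example path is an assumption): on Windows, `Path` splits on backslashes and `PurePosixPath` re-joins the components with forward slashes.

    from pathlib import Path, PurePosixPath

    relpath = "subdir\\archive-1.0.tar.gz"  # a Windows-style relative path
    url_path = str(PurePosixPath(Path(relpath)))
    # on Windows this yields "subdir/archive-1.0.tar.gz", as URLs require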
@@ -4,6 +4,11 @@ default:
  image: { "name": "ghcr.io/spack/e4s-ubuntu-18.04:v2021-10-18", "entrypoint": [""] }

# CI Platform-Arch
+.cray_zen4:
+  variables:
+    SPACK_TARGET_PLATFORM: "cray"
+    SPACK_TARGET_ARCH: "zen4"
+
.darwin_x86_64:
  variables:
    SPACK_TARGET_PLATFORM: "darwin"

@@ -324,7 +329,7 @@ gpu-tests-build:

e4s-oneapi-generate:
  extends: [ ".e4s-oneapi", ".generate"]
-  image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2023-01-01
+  image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2023.06.01

e4s-oneapi-build:
  extends: [ ".e4s-oneapi", ".build" ]

@@ -731,12 +736,15 @@ deprecated-ci-build:
    # Use gcc from local container buildcache
-    - . "./share/spack/setup-env.sh"
+    - . /etc/profile.d/modules.sh
+    - spack buildcache rebuild-index /bootstrap/local-cache/
+    - spack mirror add local-cache /bootstrap/local-cache
+    - spack gpg trust /bootstrap/public-key
    - cd "${CI_PROJECT_DIR}" && curl -sOL https://raw.githubusercontent.com/spack/spack-configs/main/AWS/parallelcluster/postinstall.sh
    - sed -i -e "s/spack arch -t/echo ${SPACK_TARGET_ARCH}/g" postinstall.sh
    - sed -i.bkp s/"spack install gcc"/"spack install --cache-only --reuse gcc"/ postinstall.sh
    - diff postinstall.sh postinstall.sh.bkp || echo Done
    - /bin/bash postinstall.sh -fg
-    - spack config --scope site add "packages:all:target:\"target=${SPACK_TARGET_ARCH}\""
+    - spack config --scope site add "packages:all:target:[${SPACK_TARGET_ARCH}]"
  after_script:
-    - mv "${CI_PROJECT_DIR}/postinstall.sh" "${CI_PROJECT_DIR}/jobs_scratch_dir/"

@@ -815,3 +823,89 @@ aws-pcluster-build-neoverse_v1:
  needs:
    - artifacts: True
      job: aws-pcluster-generate-neoverse_v1

+# Cray definitions
+.base-cray-job:
+  variables:
+    SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries-cray/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}"
+    AWS_ACCESS_KEY_ID: ${CRAY_MIRRORS_AWS_ACCESS_KEY_ID}
+    AWS_SECRET_ACCESS_KEY: ${CRAY_MIRRORS_AWS_SECRET_ACCESS_KEY}
+  rules:
+    - if: $CI_COMMIT_REF_NAME == "develop"
+      # Pipelines on develop only rebuild what is missing from the mirror
+      when: always
+      variables:
+        SPACK_PIPELINE_TYPE: "spack_protected_branch"
+    - if: $CI_COMMIT_REF_NAME =~ /^pr[\d]+_.*$/
+      # Pipelines on PR branches rebuild only what's missing, and do extra pruning
+      when: always
+      variables:
+        SPACK_PIPELINE_TYPE: "spack_pull_request"
+        SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries-cray/prs/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}"
+        SPACK_PRUNE_UNTOUCHED: "True"
+        SPACK_PRUNE_UNTOUCHED_DEPENDENT_DEPTH: "1"
+
+.generate-cray:
+  tags: [ "cce@15.0.1", "cray-zen4", "public" ]
+  extends: [ ".base-cray-job" ]
+  stage: generate
+  script:
+    - echo $PATH
+    - module avail
+    - module list
+    - export SPACK_DISABLE_LOCAL_CONFIG=1
+    - export SPACK_USER_CACHE_PATH=$(pwd)/_user_cache
+    - uname -a || true
+    - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
+    - nproc || true
+    - . "./share/spack/setup-env.sh"
+    - spack --version
+    - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME}
+    - spack env activate --without-view .
+    - export SPACK_CI_CONFIG_ROOT="${SPACK_ROOT}/share/spack/gitlab/cloud_pipelines/configs"
+    - spack
+      --config-scope "${SPACK_CI_CONFIG_ROOT}"
+      --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}"
+      --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}/${SPACK_TARGET_ARCH}"
+      ${CI_STACK_CONFIG_SCOPES}
+      ci generate --check-index-only
+      --buildcache-destination "${SPACK_BUILDCACHE_DESTINATION}"
+      --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
+      --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml"
+  after_script:
+    - cat /proc/loadavg || true
+  artifacts:
+    paths:
+      - "${CI_PROJECT_DIR}/jobs_scratch_dir"
+  interruptible: true
+  timeout: 60 minutes
+  retry:
+    max: 2
+    when:
+      - always
+
+.build-cray:
+  extends: [ ".base-cray-job" ]
+  stage: build
+
+#######################################
+# E4S - Cray
+#######################################
+.e4s-cray:
+  extends: [ ".cray_zen4" ]
+  variables:
+    SPACK_CI_STACK_NAME: e4s-cray
+
+e4s-cray-generate:
+  extends: [ ".generate-cray", ".e4s-cray" ]
+
+e4s-cray-build:
+  extends: [ ".build-cray", ".e4s-cray" ]
+  trigger:
+    include:
+      - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
+        job: e4s-cray-generate
+    strategy: depend
+  needs:
+    - artifacts: True
+      job: e4s-cray-generate
share/spack/gitlab/cloud_pipelines/configs/cray/ci.yaml (new file, 295 lines)
@@ -0,0 +1,295 @@
ci:
  pipeline-gen:
  - build-job-remove:
      image: no-image
  - build-job:
      script+:
        # AWS runners mount E4S public key (verification), UO runners mount public/private (signing/verification)
        - if [[ -r /etc/protected-runner/e4s.gpg ]]; then spack gpg trust /etc/protected-runner/e4s.gpg; fi
        # UO runners mount intermediate ci public key (verification), AWS runners mount public/private (signing/verification)
        - if [[ -r /etc/protected-runner/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /etc/protected-runner/intermediate_ci_signing_key.gpg; fi
        - if [[ -r /etc/protected-runner/spack_public_key.gpg ]]; then spack gpg trust /etc/protected-runner/spack_public_key.gpg; fi
  - match_behavior: first
    submapping:
    - match:
      - hipblas
      - llvm
      - llvm-amdgpu
      - pango
      - paraview
      - py-tensorflow
      - py-torch
      - qt
      - rocblas
      - visit
      build-job:
        tags: [ "spack", "huge" ]
        variables:
          CI_JOB_SIZE: huge
          SPACK_BUILD_JOBS: "12"

    - match:
      - ascent
      - atk
      - axom
      - cistem
      - cmake
      - ctffind
      - cuda
      - dealii
      - dray
      - dyninst
      - ecp-data-vis-sdk
      - gcc
      - ginkgo
      - hdf5
      - hpx
      - kokkos-kernels
      - kokkos-nvcc-wrapper
      - lbann
      - magma
      - mesa
      - mfem
      - mpich
      - netlib-lapack
      - nvhpc
      - oce
      - openblas
      - openfoam
      - openturns
      - parallelio
      - plumed
      - precice
      #- py-tensorflow
      #- qt
      - raja
      - relion
      #- rocblas
      - rocfft
      - rocsolver
      - rocsparse
      - rust
      - slate
      - strumpack
      - sundials
      - trilinos
      - umpire
      #- visit
      - vtk
      - vtk-h
      - vtk-m
      - warpx
      - wrf
      - wxwidgets
      build-job:
        tags: [ "spack", "large" ]
        variables:
          CI_JOB_SIZE: large
          SPACK_BUILD_JOBS: "8"

    - match:
      - adios2
      - amrex
      - archer
      - ascent
      - autoconf-archive
      - axom
      - binutils
      - blaspp
      - blt
      - boost
      - butterflypack
      - cabana
      - caliper
      - camp
      - chai
      - conduit
      - curl
      - datatransferkit
      - double-conversion
      - dray
      - eigen
      - faodel
      - ffmpeg
      - fftw
      - fortrilinos
      - gettext
      - gperftools
      - gptune
      - hdf5
      - heffte
      - hpctoolkit
      - hwloc
      - hydrogen
      - hypre
      - kokkos
      - lammps
      - lapackpp
      - legion
      - libtool
      - libxml2
      - libzmq
      - llvm-openmp-ompt
      - mbedtls
      - mfem
      - mpich
      - mvapich2
      - nasm
      - netlib-scalapack
      - omega-h
      - openblas
      - openjpeg
      - openmpi
      - openpmd-api
      - pagmo2
      - papyrus
      - parsec
      - pdt
      - pegtl
      - petsc
      - pumi
      - py-beniget
      - py-cinemasci
      - pygmo
      - py-ipython-genutils
      - py-packaging
      - py-petsc4py
      - py-scipy
      - py-statsmodels
      - py-warlock
      - py-warpx
      - raja
      - samrai
      - slepc
      - slurm
      - sqlite
      - strumpack
      - sundials
      - superlu-dist
      - tasmanian
      - tau
      - upcxx
      - vtk
      - vtk-h
      - vtk-m
      - zfp
      build-job:
        tags: [ "spack", "medium" ]
        variables:
          CI_JOB_SIZE: "medium"
          SPACK_BUILD_JOBS: "2"

    - match:
      - alsa-lib
      - ant
      - antlr
      - argobots
      - autoconf-archive
      - automake
      - berkeley-db
      - bison
      - blt
      - bzip2
      - camp
      - cmake
      - curl
      - czmq
      - darshan-util
      - diffutils
      - docbook-xml
      - exmcutils
      - expat
      - findutils
      - flit
      - freetype
      - gawk
      - gdbm
      - gettext
      - glib
      - gmake
      - gotcha
      - hpcviewer
      - hwloc
      - jansson
      - json-c
      - libbsd
      - libedit
      - libevent
      - libfabric
      - libffi
      - libgcrypt
      - libiconv
      - libidn2
      - libjpeg-turbo
      - libmd
      - libnrm
      - libpciaccess
      - libpng
      - libsigsegv
      - libsodium
      - libunistring
      - libunwind
      - libxml2
      - libyaml
      - libzmq
      - lua
      - lua-luaposix
      - lz4
      - m4
      - meson
      - metis
      - mpfr
      - ncurses
      - ninja
      - numactl
      - openblas
      - openjdk
      - openssh
      - openssl
      - papi
      - parallel-netcdf
      - pcre
      - pcre2
      - pdsh
      - perl
      - perl-data-dumper
      - pkgconf
      - py-alembic
      - py-cffi
      - py-cycler
      - py-decorator
      - py-idna
      - py-jsonschema
      - py-kiwisolver
      - py-mistune
      - py-pycparser
      - py-setuptools
      - py-setuptools-scm
      - py-six
      - py-testpath
      - py-wheel
      - qhull
      - readline
      - sed
      - slurm
      - snappy
      - sqlite
      - superlu
      - swig
      - tar
      - tcl
      - texinfo
      - tut
      - unzip
      - util-linux-uuid
      - util-macros
      - xz
      - yaml-cpp
      - zfp
      - zlib
      - zstd
      build-job:
        tags: [ "spack", "small" ]
        variables:
          CI_JOB_SIZE: "small"
          SPACK_BUILD_JOBS: "1"
Some files were not shown because too many files have changed in this diff.