Compare commits: load-run-d...devtools-r (504 commits)
| Author | SHA1 | Date |
|---|---|---|
| | ddc5dc7da3 | |
| | 0663ac3633 | |
| | f80df0ca47 | |
| | 367abcb801 | |
| | 8fc1ba2d7a | |
| | 668a5b45e5 | |
| | 70171d6caf | |
| | 0f1898c82a | |
| | db16335aec | |
| | 3082ce6a22 | |
| | fe0cf80e05 | |
| | a5e6097af7 | |
| | d4a1618e07 | |
| | 48a21970d1 | |
| | 864d47043c | |
| | c2af2bcac3 | |
| | 7c79c744b6 | |
| | 94d143763e | |
| | 6f9425c593 | |
| | 05953e4491 | |
| | 6b236f130c | |
| | fa08de669e | |
| | c2193b5470 | |
| | b5b94d89d3 | |
| | dd57b58c2f | |
| | 29a30963b3 | |
| | 3447e425f0 | |
| | 518da16833 | |
| | 4633327e60 | |
| | 6930176ac6 | |
| | bb64b22066 | |
| | 8b0ab67de4 | |
| | dbf21bf843 | |
| | af3a29596e | |
| | 80944d22f7 | |
| | f56efaff3e | |
| | 83bb2002b4 | |
| | 16fa3b9f07 | |
| | 6cd2241e49 | |
| | 6af45230b4 | |
| | a8285f0eec | |
| | e7456e1aab | |
| | dd636dd3fb | |
| | a73c95b734 | |
| | 33b355a085 | |
| | f7630f265b | |
| | 9744e86d02 | |
| | ff6bbf03a1 | |
| | 0767c8673e | |
| | 9aa75eaf65 | |
| | 73f012b999 | |
| | c7a8a83cbf | |
| | 5f87db98ea | |
| | d05dc8a468 | |
| | afa2a2566e | |
| | 581f45b639 | |
| | 92780a9af6 | |
| | 2ea8e6c820 | |
| | ac976a4bf4 | |
| | e5f3ffc04f | |
| | 7aaed4d6f3 | |
| | f5d717cd5a | |
| | cb018fd7eb | |
| | e5cebb6b6f | |
| | 4738b45fb1 | |
| | 343ed8a3fa | |
| | 58e5315089 | |
| | 26649e71f9 | |
| | 2f2d9ae30d | |
| | f9c0a15ba0 | |
| | 14cb923dd8 | |
| | 544a121248 | |
| | cd6bb9e159 | |
| | e420a685a9 | |
| | 40a5c1ff2d | |
| | 6933e1c3cb | |
| | 160bfd881d | |
| | 81997ae6d6 | |
| | 702a2250fa | |
| | 3a0f9ce226 | |
| | a095c8113d | |
| | 4ef433b64d | |
| | f228c7cbcc | |
| | e9ca16ab07 | |
| | 47ac2b8d09 | |
| | b1b8500eba | |
| | 060a1ff2f3 | |
| | 9ed9a541c9 | |
| | 1ebf1c8d1c | |
| | c2f3943e9e | |
| | 1ba530bff5 | |
| | cc09e88a4a | |
| | 2f3801196d | |
| | d03289c38b | |
| | e720d8640a | |
| | 00602cda4f | |
| | 35882130ce | |
| | 1586c8c786 | |
| | a9e78dc7d8 | |
| | b53b235cff | |
| | 33cb8c988f | |
| | 6511d3dfff | |
| | 272ca0fc24 | |
| | a8f42b865f | |
| | 7739c54eb5 | |
| | bd1bb7d1ba | |
| | 6983db1392 | |
| | 2a797f90b4 | |
| | 2e097b4cbd | |
| | a1282337c0 | |
| | 361d973f97 | |
| | 64ec6e7d8e | |
| | 9f95945cb5 | |
| | 21f3240e08 | |
| | 28d617c1c8 | |
| | 7da4b3569f | |
| | f8aa66b62e | |
| | a1d3e0002c | |
| | 148dce96ed | |
| | 9e01199e13 | |
| | ed7274a4d0 | |
| | f2963e41ba | |
| | 069762cd37 | |
| | 195f965076 | |
| | 3fff8be929 | |
| | 1bf758a784 | |
| | 9b8fb413c3 | |
| | 51275df0b1 | |
| | af13d16c2c | |
| | 37f48aff8b | |
| | feda52f800 | |
| | 8959d65577 | |
| | 546695f193 | |
| | c3f5ee54d4 | |
| | d64f312726 | |
| | b4b25dec64 | |
| | 81172f9251 | |
| | cbf9dd0aee | |
| | 7ecb9243c1 | |
| | e96f31c29d | |
| | 53d5011192 | |
| | 751b64cbcd | |
| | f57c2501a3 | |
| | 1c8073c21f | |
| | 86520abb68 | |
| | bf88ed45da | |
| | b4cf3d9f18 | |
| | 8e19576ec5 | |
| | 3c590ad071 | |
| | 3e47f3f05c | |
| | d9edc92119 | |
| | 2a245fdd21 | |
| | 932d7a65e0 | |
| | 6bd2dd032b | |
| | c0a4be156c | |
| | 0c30418732 | |
| | 3063093322 | |
| | f4bbc0dbd2 | |
| | 1ecb100e43 | |
| | e1da9339d9 | |
| | 2d203df075 | |
| | 50f25964cf | |
| | 95558d67ae | |
| | 83532b5469 | |
| | 444c27ca53 | |
| | d075732cc5 | |
| | cf9a32e6db | |
| | bc54aa1e82 | |
| | 88622d5129 | |
| | d0982115b3 | |
| | 1e4a5791b2 | |
| | 8def7f5583 | |
| | 66f07088cb | |
| | bf6d5df0ec | |
| | 3eac79bba7 | |
| | 47c9760492 | |
| | a452e8379e | |
| | a6466b9ddd | |
| | 96548047f8 | |
| | a675156c70 | |
| | cfc5363053 | |
| | d9167834c4 | |
| | 8a4860480a | |
| | f4c813f74a | |
| | 8b4e557fed | |
| | c5d0fd42e6 | |
| | 428202b246 | |
| | 1c0d3bc071 | |
| | eea3c07628 | |
| | 7cd5fcb484 | |
| | bbb4c939da | |
| | f915489c62 | |
| | 1527853efd | |
| | d820cf73e9 | |
| | 8714b24420 | |
| | 0c18f81b80 | |
| | d442fac69a | |
| | 76c57af021 | |
| | 27a0425e5d | |
| | 4bade7ef96 | |
| | a0e33bf7b0 | |
| | cbc39977ca | |
| | 06fc24df5e | |
| | 9543abd2d9 | |
| | 004d3e4cca | |
| | 25aff66d34 | |
| | 9bd77b2ed3 | |
| | 5de1c1c98f | |
| | 5b9b5eaa28 | |
| | 00ee72396f | |
| | aa4d55004c | |
| | 468f6c757e | |
| | 0907d43783 | |
| | c9e5173bbd | |
| | 0019faaa17 | |
| | e30f53f206 | |
| | f2ba25e09d | |
| | 405de56c71 | |
| | ba571f2404 | |
| | 4c1785d5f6 | |
| | fa4d5ee929 | |
| | 8720cec283 | |
| | 72b36ac144 | |
| | 79896ee85c | |
| | 408ee04014 | |
| | 3f594e86a1 | |
| | 46c1a8e4c6 | |
| | b2d3e01fe6 | |
| | 681639985a | |
| | a1ca1a944a | |
| | 4f49f7b9df | |
| | fb584853dd | |
| | cc47b06756 | |
| | b68a620fc2 | |
| | e417ca54a0 | |
| | 5bbf8454d0 | |
| | 67b8dd0913 | |
| | a42eb0d2bd | |
| | 294e659ae8 | |
| | 55198c49e5 | |
| | dc071a3995 | |
| | db5d0ac6ac | |
| | 2802013dc6 | |
| | 37bafce384 | |
| | da0813b049 | |
| | e2bb2595b3 | |
| | b7cbcfdcab | |
| | 9cde25b39e | |
| | 49ea0a8e2e | |
| | d317ddfebe | |
| | b1eef4c82d | |
| | a4ad365de0 | |
| | 8c257d55b4 | |
| | bd165ebc4d | |
| | 348e5cb522 | |
| | 7cc17f208c | |
| | 2913cd936a | |
| | 361a185ddb | |
| | 9d5615620a | |
| | ae185087e7 | |
| | 4a96d29e69 | |
| | 1e44f33163 | |
| | 348493abcd | |
| | 2bc4bfa877 | |
| | 3e3b287761 | |
| | 74bbb1ef1b | |
| | 22405fbb68 | |
| | 14d935bd6c | |
| | 363b9d3c7b | |
| | 8347ae3766 | |
| | 1106f6b9f2 | |
| | e22117304e | |
| | 10999c0283 | |
| | 7adeee0980 | |
| | a9cfa32c34 | |
| | 718aa8b82f | |
| | dbf3bed380 | |
| | ef55c7c916 | |
| | 2015d3d2bc | |
| | 76bac6d4bf | |
| | b960d476e3 | |
| | 8a311d7746 | |
| | 39d2baec8a | |
| | 26e063177d | |
| | 149d1946ee | |
| | 3604f6238d | |
| | 2ad9470670 | |
| | 8dde74854a | |
| | fa5aadbbc0 | |
| | 0989cb8866 | |
| | b536260eb5 | |
| | 8f2de4663e | |
| | 0693892521 | |
| | 3be78717d2 | |
| | 655d123785 | |
| | b8cb36ce50 | |
| | bc3cd02776 | |
| | a027adcaa2 | |
| | 3783032d28 | |
| | c0ac5e3f6b | |
| | 87371d58d5 | |
| | ef11fd7f75 | |
| | d0f046e788 | |
| | 794fb9b252 | |
| | 86c7d646c3 | |
| | 7d96077667 | |
| | a6fbfedc08 | |
| | a6cfeabc10 | |
| | a3a29006aa | |
| | a5cb7a9816 | |
| | edf4aa9f52 | |
| | 02c680ec3a | |
| | 8248e180ca | |
| | c9677b2465 | |
| | 3752fe9e42 | |
| | 8a0de10f60 | |
| | 6aa8d76e32 | |
| | fb1d0f60d9 | |
| | 728eaa515f | |
| | 7c354095a9 | |
| | 64ef33767f | |
| | 265432f7b7 | |
| | aa7dfdb5c7 | |
| | bfe37435a4 | |
| | 285a50f862 | |
| | 995e82e72b | |
| | 3935e047c6 | |
| | 0fd2427d9b | |
| | 30d29d0201 | |
| | 3e1f2392d4 | |
| | 6a12a40208 | |
| | 90e73391c2 | |
| | deec1b7c2e | |
| | d9cb1a1070 | |
| | 01747b50df | |
| | df01a11e07 | |
| | 7a4b479724 | |
| | 89e34d56a1 | |
| | a5853ee51a | |
| | 537ab48167 | |
| | e43a090877 | |
| | 275a2f35b5 | |
| | dae746bb96 | |
| | 3923b81d87 | |
| | 5d582a5e48 | |
| | 7dbc712fba | |
| | 639ef9e24a | |
| | 86d2200523 | |
| | fe6860e0d7 | |
| | 8f2e68aeb8 | |
| | bc4c887452 | |
| | b3534b4435 | |
| | 861bb4d35a | |
| | 65e7ec0509 | |
| | 1ab8886695 | |
| | 26136c337f | |
| | e3b71b32aa | |
| | 6d1711f4c2 | |
| | 26f291ef25 | |
| | da030617a1 | |
| | 1ebfcd3b18 | |
| | d385a57da3 | |
| | 37df8bfc73 | |
| | b781a530a1 | |
| | 390b0aa25c | |
| | 620835e30c | |
| | da10487219 | |
| | 4d51810888 | |
| | 6c7b2e1056 | |
| | 749e99bf11 | |
| | 6db8e0a61e | |
| | 6fe914421a | |
| | 9275f180bb | |
| | 2541b42fc2 | |
| | fb340f130b | |
| | d2ddd99ef6 | |
| | 492a8111b9 | |
| | d846664165 | |
| | 31b3e4898b | |
| | 82f1267486 | |
| | 19202b2528 | |
| | 831cbec71f | |
| | bb2ff802e2 | |
| | 83e9537f57 | |
| | 3488e83deb | |
| | c116eee921 | |
| | 9cb291b41b | |
| | c0f1072dc7 | |
| | 3108036533 | |
| | 215c699307 | |
| | f609093c6e | |
| | eb4fd98f09 | |
| | 08da9a854a | |
| | 3a18fe04cc | |
| | 512e41a84a | |
| | 8089aedde1 | |
| | 6b9e103305 | |
| | 00396fbe6c | |
| | a3be9cb853 | |
| | 81f58229ab | |
| | 2eb16a8ea2 | |
| | 9db782f8d9 | |
| | 633df54520 | |
| | e2a7f2ee9a | |
| | 28c49930e2 | |
| | 6c1868f8ae | |
| | 4f992475f4 | |
| | 7a358c9005 | |
| | b5079614b0 | |
| | 482525d0f9 | |
| | 599220924d | |
| | d341be83e5 | |
| | b027d7d0de | |
| | 0357df0c8b | |
| | f70ae6e3c4 | |
| | 921ed1c21b | |
| | c95d43771a | |
| | db3d816f8b | |
| | 1d6a142608 | |
| | 98271c3712 | |
| | e3f6df884e | |
| | b0f36b2cd9 | |
| | 5524492e25 | |
| | 112f045352 | |
| | 72ed8711a7 | |
| | 55e0c2c900 | |
| | e20c05fcdf | |
| | 36183eac40 | |
| | 7254c76b68 | |
| | e0e6ff5a68 | |
| | b0d49d4973 | |
| | 4ce5d14066 | |
| | 9e9653ac58 | |
| | bec873aec9 | |
| | bf7f54449b | |
| | 9f0e3c0fed | |
| | 79e7da9420 | |
| | 0f43074f3e | |
| | d297098504 | |
| | 284eaf1afe | |
| | da637dba84 | |
| | 931fce2c24 | |
| | 42fbf17c82 | |
| | d9cacf664c | |
| | 7bf6780de2 | |
| | 91178d40f3 | |
| | 2817cd2936 | |
| | 92a6ddcbc3 | |
| | 58017f484c | |
| | 86d2e1af97 | |
| | bf23be291b | |
| | 3b32a9918c | |
| | f0260c84b4 | |
| | 8746c75db0 | |
| | e8f230199f | |
| | 1e3c7abc1c | |
| | 12e51da102 | |
| | 992291c738 | |
| | 78e63fa257 | |
| | 487ea8b263 | |
| | 0d877b4184 | |
| | 994544f208 | |
| | 36bb2a5d09 | |
| | 071c1c38dc | |
| | b480ae2b7d | |
| | 7a390f503d | |
| | b7cb3462d4 | |
| | f2230100ac | |
| | 4b06862a7f | |
| | 06057d6dba | |
| | bb03a1768b | |
| | 75ed26258c | |
| | 1da8477a3c | |
| | 4c111554ae | |
| | 615312fcee | |
| | 453625014d | |
| | 1b75651af6 | |
| | b3e3604f46 | |
| | 6c4ce379ca | |
| | a9dcba76ce | |
| | 32f21f2a01 | |
| | e60bbd1bfc | |
| | 71c5b948d0 | |
| | 726d6b9881 | |
| | aff64c02e8 | |
| | 31ae5cba91 | |
| | 0a91d2411a | |
| | 5f3af3d5e4 | |
| | 37158cb913 | |
| | a596e16a37 | |
| | 4e69f5121f | |
| | 2a0f4393c3 | |
| | c9e1e7d90c | |
| | 7170f2252c | |
| | b09073e01e | |
| | 2d509dc3eb | |
| | 8a9d45cc29 | |
| | b25f8643ff | |
| | 9120b6644d | |
| | 68dbd25f5f | |
| | 9e54134daf | |
| | 08a9345fcc | |
| | 7d072cc16f | |
| | d81f457e7a | |
.github/workflows/audit.yaml (vendored, 4 changes)

@@ -22,8 +22,8 @@ jobs:
       matrix:
         operating_system: ["ubuntu-latest", "macos-latest"]
     steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
-    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+    - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
+    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
       with:
         python-version: ${{inputs.python_version}}
     - name: Install Python packages
.github/workflows/bootstrap.yml (vendored, 39 changes)

@@ -24,7 +24,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison bison-devel libstdc++-static
     - name: Checkout
-      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+      uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
       with:
         fetch-depth: 0
     - name: Setup non-root user
@@ -42,8 +42,8 @@ jobs:
       shell: runuser -u spack-test -- bash {0}
       run: |
         source share/spack/setup-env.sh
+        spack bootstrap disable github-actions-v0.5
         spack bootstrap disable github-actions-v0.4
-        spack bootstrap disable github-actions-v0.3
         spack external find cmake bison
         spack -d solve zlib
         tree ~/.spack/bootstrap/store/
@@ -62,7 +62,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           cmake bison
     - name: Checkout
-      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+      uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
       with:
         fetch-depth: 0
     - name: Setup non-root user
@@ -80,8 +80,8 @@ jobs:
      shell: runuser -u spack-test -- bash {0}
      run: |
        source share/spack/setup-env.sh
+       spack bootstrap disable github-actions-v0.5
        spack bootstrap disable github-actions-v0.4
-       spack bootstrap disable github-actions-v0.3
        spack external find cmake bison
        spack -d solve zlib
        tree ~/.spack/bootstrap/store/
@@ -99,7 +99,7 @@ jobs:
          bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
          make patch unzip xz-utils python3 python3-dev tree
    - name: Checkout
-     uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+     uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
          make patch unzip which xz python3 python3-devel tree \
          cmake bison
    - name: Checkout
-     uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+     uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
      with:
        fetch-depth: 0
    - name: Setup repo
@@ -145,8 +145,8 @@ jobs:
    - name: Bootstrap clingo
      run: |
        source share/spack/setup-env.sh
+       spack bootstrap disable github-actions-v0.5
        spack bootstrap disable github-actions-v0.4
-       spack bootstrap disable github-actions-v0.3
        spack external find cmake bison
        spack -d solve zlib
        tree ~/.spack/bootstrap/store/
@@ -158,13 +158,16 @@ jobs:
      run: |
        brew install cmake bison@2.7 tree
    - name: Checkout
-     uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+     uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+   - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
+     with:
+       python-version: "3.12"
    - name: Bootstrap clingo
      run: |
        source share/spack/setup-env.sh
        export PATH=/usr/local/opt/bison@2.7/bin:$PATH
+       spack bootstrap disable github-actions-v0.5
        spack bootstrap disable github-actions-v0.4
-       spack bootstrap disable github-actions-v0.3
        spack external find --not-buildable cmake bison
        spack -d solve zlib
        tree ~/.spack/bootstrap/store/
@@ -179,7 +182,7 @@ jobs:
      run: |
        brew install tree
    - name: Checkout
-     uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+     uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
    - name: Bootstrap clingo
      run: |
        set -ex
@@ -204,7 +207,7 @@ jobs:
    runs-on: ubuntu-20.04
    steps:
    - name: Checkout
-     uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+     uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
      with:
        fetch-depth: 0
    - name: Setup repo
@@ -247,7 +250,7 @@ jobs:
          bzip2 curl file g++ gcc patchelf gfortran git gzip \
          make patch unzip xz-utils python3 python3-dev tree
    - name: Checkout
-     uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+     uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -265,6 +268,7 @@ jobs:
      shell: runuser -u spack-test -- bash {0}
      run: |
        source share/spack/setup-env.sh
+       spack bootstrap disable github-actions-v0.4
        spack bootstrap disable spack-install
        spack -d gpg list
        tree ~/.spack/bootstrap/store/
@@ -283,7 +287,7 @@ jobs:
          make patch unzip xz-utils python3 python3-dev tree \
          gawk
    - name: Checkout
-     uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+     uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -302,8 +306,8 @@ jobs:
      run: |
        source share/spack/setup-env.sh
        spack solve zlib
+       spack bootstrap disable github-actions-v0.5
        spack bootstrap disable github-actions-v0.4
-       spack bootstrap disable github-actions-v0.3
        spack -d gpg list
        tree ~/.spack/bootstrap/store/
@@ -316,10 +320,11 @@ jobs:
        # Remove GnuPG since we want to bootstrap it
        sudo rm -rf /usr/local/bin/gpg
    - name: Checkout
-     uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+     uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
    - name: Bootstrap GnuPG
      run: |
        source share/spack/setup-env.sh
+       spack bootstrap disable github-actions-v0.4
        spack bootstrap disable spack-install
        spack -d gpg list
        tree ~/.spack/bootstrap/store/
@@ -333,13 +338,13 @@ jobs:
        # Remove GnuPG since we want to bootstrap it
        sudo rm -rf /usr/local/bin/gpg
    - name: Checkout
-     uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+     uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
    - name: Bootstrap GnuPG
      run: |
        source share/spack/setup-env.sh
        spack solve zlib
+       spack bootstrap disable github-actions-v0.5
        spack bootstrap disable github-actions-v0.4
-       spack bootstrap disable github-actions-v0.3
        spack -d gpg list
        tree ~/.spack/bootstrap/store/
.github/workflows/build-containers.yml (vendored, 2 changes)

@@ -56,7 +56,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2

       - name: Set Container Tag Normal (Nightly)
         run: |
.github/workflows/ci.yaml (vendored, 2 changes)

@@ -35,7 +35,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+    - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
      if: ${{ github.event_name == 'push' }}
      with:
        fetch-depth: 0
.github/workflows/nightly-win-builds.yml (vendored, 4 changes)

@@ -14,10 +14,10 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+    - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
+    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
      with:
        python-version: 3.9
    - name: Install Python packages
.github/workflows/style/requirements.txt (vendored, 4 changes)

@@ -1,7 +1,7 @@
-black==23.9.1
+black==23.10.1
 clingo==5.6.2
 flake8==6.1.0
 isort==5.12.0
-mypy==1.5.1
+mypy==1.6.1
 types-six==1.16.21.9
 vermin==1.5.2
.github/workflows/unit_tests.yaml (vendored, 26 changes)

@@ -15,7 +15,7 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest]
-        python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
+        python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
        concretizer: ['clingo']
        on_develop:
        - ${{ github.ref == 'refs/heads/develop' }}
@@ -45,12 +45,16 @@ jobs:
          os: ubuntu-latest
          concretizer: 'clingo'
          on_develop: false
+       - python-version: '3.11'
+         os: ubuntu-latest
+         concretizer: 'clingo'
+         on_develop: false

    steps:
-   - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+   - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+   - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install System packages
@@ -94,10 +98,10 @@ jobs:
  shell:
    runs-on: ubuntu-latest
    steps:
-   - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+   - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+   - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
      with:
        python-version: '3.11'
    - name: Install System packages
@@ -133,7 +137,7 @@ jobs:
        dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-   - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+   - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
@@ -152,10 +156,10 @@ jobs:
  clingo-cffi:
    runs-on: ubuntu-latest
    steps:
-   - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+   - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+   - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
      with:
        python-version: '3.11'
    - name: Install System packages
@@ -185,12 +189,12 @@ jobs:
    runs-on: macos-latest
    strategy:
      matrix:
-       python-version: ["3.10"]
+       python-version: ["3.11"]
    steps:
-   - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+   - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+   - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install Python packages
.github/workflows/valid-style.yml (vendored, 10 changes)

@@ -18,8 +18,8 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
-    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
+    - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
      with:
        python-version: '3.11'
        cache: 'pip'
@@ -35,10 +35,10 @@ jobs:
  style:
    runs-on: ubuntu-latest
    steps:
-   - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+   - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
+   - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
      with:
        python-version: '3.11'
        cache: 'pip'
@@ -69,7 +69,7 @@ jobs:
        dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-   - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+   - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
.github/workflows/windows_python.yml (vendored, 12 changes)

@@ -15,10 +15,10 @@ jobs:
   unit-tests:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+    - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
+    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -39,10 +39,10 @@ jobs:
  unit-tests-cmd:
    runs-on: windows-latest
    steps:
-   - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+   - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
+   - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -63,10 +63,10 @@ jobs:
  build-abseil:
    runs-on: windows-latest
    steps:
-   - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+   - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
+   - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
      with:
        python-version: 3.9
    - name: Install Python packages
CHANGELOG.md (30 changes)

@@ -1,3 +1,33 @@
+# v0.20.3 (2023-10-31)
+
+## Bugfixes
+
+- Fix a bug where `spack mirror set-url` would drop configured connection info (reverts #34210)
+- Fix a minor issue with package hash computation for Python 3.12 (#40328)
+
+
+# v0.20.2 (2023-10-03)
+
+## Features in this release
+
+Spack now supports Python 3.12 (#40155)
+
+## Bugfixes
+
+- Improve escaping in Tcl module files (#38375)
+- Make repo cache work on repositories with zero mtime (#39214)
+- Ignore errors for newer, incompatible buildcache version (#40279)
+- Print an error when git is required, but missing (#40254)
+- Ensure missing build dependencies get installed when using `spack install --overwrite` (#40252)
+- Fix an issue where Spack freezes when the build process unexpectedly exits (#39015)
+- Fix a bug where installation failures cause an unrelated `NameError` to be thrown (#39017)
+- Fix an issue where Spack package versions would be incorrectly derived from git tags (#39414)
+- Fix a bug triggered when file locking fails internally (#39188)
+- Prevent "spack external find" to error out when a directory cannot be accessed (#38755)
+- Fix multiple performance regressions in environments (#38771)
+- Add more ignored modules to `pyproject.toml` for `mypy` (#38769)
+
+
 # v0.20.1 (2023-07-10)

 ## Spack Bugfixes
CITATION.cff (54 changes)

@@ -27,12 +27,53 @@
 # And here's the CITATION.cff format:
 #
 cff-version: 1.2.0
+type: software
 message: "If you are referencing Spack in a publication, please cite the paper below."
+title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
+abstract: >-
+  Large HPC centers spend considerable time supporting software for thousands of users, but the complexity of HPC software is quickly outpacing the capabilities of existing software management tools.
+  Scientific applications require specific versions of compilers, MPI, and other dependency libraries, so using a single, standard software stack is infeasible.
+  However, managing many configurations is difficult because the configuration space is combinatorial in size.
+  We introduce Spack, a tool used at Lawrence Livermore National Laboratory to manage this complexity.
+  Spack provides a novel, recursive specification syntax to invoke parametric builds of packages and dependencies.
+  It allows any number of builds to coexist on the same system, and it ensures that installed packages can find their dependencies, regardless of the environment.
+  We show through real-world use cases that Spack supports diverse and demanding applications, bringing order to HPC software chaos.
 preferred-citation:
+  title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
   type: conference-paper
   doi: "10.1145/2807591.2807623"
-  url: "https://github.com/spack/spack"
+  url: "https://tgamblin.github.io/pubs/spack-sc15.pdf"
+  authors:
+  - family-names: "Gamblin"
+    given-names: "Todd"
+  - family-names: "LeGendre"
+    given-names: "Matthew"
+  - family-names: "Collette"
+    given-names: "Michael R."
+  - family-names: "Lee"
+    given-names: "Gregory L."
+  - family-names: "Moody"
+    given-names: "Adam"
+  - family-names: "de Supinski"
+    given-names: "Bronis R."
+  - family-names: "Futral"
+    given-names: "Scott"
+  conference:
+    name: "Supercomputing 2015 (SC’15)"
+    city: "Austin"
+    region: "Texas"
+    country: "US"
+    date-start: 2015-11-15
+    date-end: 2015-11-20
+  month: 11
+  year: 2015
+  identifiers:
+  - description: "The concept DOI of the work."
+    type: doi
+    value: 10.1145/2807591.2807623
+  - description: "The DOE Document Release Number of the work"
+    type: other
+    value: "LLNL-CONF-669890"
 authors:
 - family-names: "Gamblin"
   given-names: "Todd"
 - family-names: "LeGendre"
@@ -47,12 +88,3 @@ preferred-citation:
   given-names: "Bronis R."
 - family-names: "Futral"
   given-names: "Scott"
-  title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
-  conference:
-    name: "Supercomputing 2015 (SC’15)"
-    city: "Austin"
-    region: "Texas"
-    country: "USA"
-    month: November 15-20
-    year: 2015
-    notes: LLNL-CONF-669890
README.md

@@ -7,6 +7,7 @@
 [](https://spack.readthedocs.io)
 [](https://github.com/psf/black)
 [](https://slack.spack.io)
+[](https://matrix.to/#/#spack-space:matrix.org)

 Spack is a multi-platform package manager that builds and installs
 multiple versions and configurations of software. It works on Linux,
@@ -62,7 +63,10 @@ Resources:

 * **Slack workspace**: [spackpm.slack.com](https://spackpm.slack.com).
   To get an invitation, visit [slack.spack.io](https://slack.spack.io).
-* [**Github Discussions**](https://github.com/spack/spack/discussions): not just for discussions, also Q&A.
+* **Matrix space**: [#spack-space:matrix.org](https://matrix.to/#/#spack-space:matrix.org):
+  [bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
+* [**Github Discussions**](https://github.com/spack/spack/discussions):
+  not just for discussions, but also Q&A.
 * **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack)
 * **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
   `@mention` us!
etc/spack/defaults/bootstrap.yaml

@@ -9,15 +9,15 @@ bootstrap:
   # may not be able to bootstrap all the software that Spack needs,
   # depending on its type.
   sources:
+  - name: 'github-actions-v0.5'
+    metadata: $spack/share/spack/bootstrap/github-actions-v0.5
   - name: 'github-actions-v0.4'
     metadata: $spack/share/spack/bootstrap/github-actions-v0.4
-  - name: 'github-actions-v0.3'
-    metadata: $spack/share/spack/bootstrap/github-actions-v0.3
   - name: 'spack-install'
     metadata: $spack/share/spack/bootstrap/spack-install
   trusted:
     # By default we trust bootstrapping from sources and from binaries
     # produced on Github via the workflow
+    github-actions-v0.5: true
     github-actions-v0.4: true
-    github-actions-v0.3: true
     spack-install: true
etc/spack/defaults/concretizer.yaml

@@ -41,4 +41,4 @@ concretizer:
     # "none": allows a single node for any package in the DAG.
     # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
     # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
-    strategy: none
+    strategy: minimal
lib/spack/docs/.gitignore (vendored, 1 change)

@@ -1,4 +1,3 @@
-package_list.html
 command_index.rst
 spack*.rst
 llnl*.rst
lib/spack/docs/basic_usage.rst

@@ -45,7 +45,8 @@ Listing available packages

 To install software with Spack, you need to know what software is
 available. You can see a list of available package names at the
-:ref:`package-list` webpage, or using the ``spack list`` command.
+`packages.spack.io <https://packages.spack.io>`_ website, or
+using the ``spack list`` command.

 .. _cmd-spack-list:

@@ -60,7 +61,7 @@ can install:
    :ellipsis: 10

 There are thousands of them, so we've truncated the output above, but you
-can find a :ref:`full list here <package-list>`.
+can find a `full list here <https://packages.spack.io>`_.
 Packages are listed by name in alphabetical order.
 A pattern to match with no wildcards, ``*`` or ``?``,
 will be treated as though it started and ended with
@@ -1525,6 +1526,30 @@ any MPI implementation will do. If another package depends on
 error. Likewise, if you try to plug in some package that doesn't
 provide MPI, Spack will raise an error.

+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Explicit binding of virtual dependencies
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+There are packages that provide more than just one virtual dependency. When interacting with them, users
+might want to utilize just a subset of what they could provide, and use other providers for virtuals they
+need.
+
+It is possible to be more explicit and tell Spack which dependency should provide which virtual, using a
+special syntax:
+
+.. code-block:: console
+
+   $ spack spec strumpack ^[virtuals=mpi] intel-parallel-studio+mkl ^[virtuals=lapack] openblas
+
+Concretizing the spec above produces the following DAG:
+
+.. figure:: images/strumpack_virtuals.svg
+   :scale: 60 %
+   :align: center
+
+where ``intel-parallel-studio`` *could* provide ``mpi``, ``lapack``, and ``blas`` but is used only for the former. The ``lapack``
+and ``blas`` dependencies are satisfied by ``openblas``.
+
 ^^^^^^^^^^^^^^^^^^^^^^^^
 Specifying Specs by Hash
 ^^^^^^^^^^^^^^^^^^^^^^^^
lib/spack/docs/binary_caches.rst

@@ -156,6 +156,131 @@ List of popular build caches
 * `Extreme-scale Scientific Software Stack (E4S) <https://e4s-project.github.io/>`_: `build cache <https://oaciss.uoregon.edu/e4s/inventory.html>`_


+-----------------------------------------
+OCI / Docker V2 registries as build cache
+-----------------------------------------
+
+Spack can also use OCI or Docker V2 registries such as Dockerhub, Quay.io,
+Github Packages, GitLab Container Registry, JFrog Artifactory, and others
+as build caches. This is a convenient way to share binaries using public
+infrastructure, or to cache Spack built binaries in Github Actions and
+GitLab CI.
+
+To get started, configure an OCI mirror using ``oci://`` as the scheme,
+and optionally specify a username and password (or personal access token):
+
+.. code-block:: console
+
+   $ spack mirror add --oci-username username --oci-password password my_registry oci://example.com/my_image
+
+Spack follows the naming conventions of Docker, with Dockerhub as the default
+registry. To use Dockerhub, you can omit the registry domain:
+
+.. code-block:: console
+
+   $ spack mirror add --oci-username username --oci-password password my_registry oci://username/my_image
+
+From here, you can use the mirror as any other build cache:
+
+.. code-block:: console
+
+   $ spack buildcache push my_registry <specs...>  # push to the registry
+   $ spack install <specs...>  # install from the registry
+
+A unique feature of buildcaches on top of OCI registries is that it's incredibly
+easy to generate get a runnable container image with the binaries installed. This
+is a great way to make applications available to users without requiring them to
+install Spack -- all you need is Docker, Podman or any other OCI-compatible container
+runtime.
+
+To produce container images, all you need to do is add the ``--base-image`` flag
+when pushing to the build cache:
+
+.. code-block:: console
+
+   $ spack buildcache push --base-image ubuntu:20.04 my_registry ninja
+   Pushed to example.com/my_image:ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack
+
+   $ docker run -it example.com/my_image:ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack
+   root@e4c2b6f6b3f4:/# ninja --version
+   1.11.1
+
+If ``--base-image`` is not specified, distroless images are produced. In practice,
+you won't be able to run these as containers, since they don't come with libc and
+other system dependencies. However, they are still compatible with tools like
+``skopeo``, ``podman``, and ``docker`` for pulling and pushing.
+
+.. note::
+   The docker ``overlayfs2`` storage driver is limited to 128 layers, above which a
+   ``max depth exceeded`` error may be produced when pulling the image. There
+   are `alternative drivers <https://docs.docker.com/storage/storagedriver/>`_.
+
+------------------------------------
+Using a buildcache in GitHub Actions
+------------------------------------
+
+GitHub Actions is a popular CI/CD platform for building and testing software,
+but each CI job has limited resources, making from source builds too slow for
+many applications. Spack build caches can be used to share binaries between CI
+runs, speeding up CI significantly.
+
+A typical workflow is to include a ``spack.yaml`` environment in your repository
+that specifies the packages to install:
+
+.. code-block:: yaml
+
+   spack:
+     specs: [pkg-x, pkg-y]
+     packages:
+       all:
+         require: target=x86_64_v2
+     mirrors:
+       github_packages: oci://ghcr.io/<user>/<repo>
+
+And a GitHub action that sets up Spack, installs packages from the build cache
+or from sources, and pushes newly built binaries to the build cache:
+
+.. code-block:: yaml
+
+   name: Install Spack packages
+
+   on: push
+
+   env:
+     SPACK_COLOR: always
+
+   jobs:
+     example:
+       runs-on: ubuntu-22.04
+       steps:
+         - name: Checkout
+           uses: actions/checkout@v3
+
+         - name: Install Spack
+           run: |
+             git clone --depth=1 https://github.com/spack/spack.git
+             echo "$PWD/spack/bin/" >> "$GITHUB_PATH"
+
+         - name: Concretize
+           run: spack -e . concretize
+
+         - name: Install
+           run: spack -e . install --no-check-signature --fail-fast
+
+         - name: Push to buildcache
+           run: |
+             spack -e . mirror set --oci-username <user> --oci-password "${{ secrets.GITHUB_TOKEN }}" github_packages
+             spack -e . buildcache push --base-image ubuntu:22.04 --unsigned --update-index github_packages
+           if: always()
+
+The first time this action runs, it will build the packages from source and
+push them to the build cache. Subsequent runs will pull the binaries from the
+build cache. The concretizer will ensure that prebuilt binaries are favored
+over source builds.
+
+The build cache entries appear in the GitHub Packages section of your repository,
+and contain instructions for pulling and running them with ``docker`` or ``podman``.
+
 ----------
 Relocation
 ----------
lib/spack/docs/build_settings.rst

@@ -3,6 +3,103 @@

    SPDX-License-Identifier: (Apache-2.0 OR MIT)

+.. _concretizer-options:
+
+==========================================
+Concretization Settings (concretizer.yaml)
+==========================================
+
+The ``concretizer.yaml`` configuration file allows to customize aspects of the
+algorithm used to select the dependencies you install. The default configuration
+is the following:
+
+.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml
+   :language: yaml
+
+--------------------------------
+Reuse already installed packages
+--------------------------------
+
+The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
+whether it will do a "fresh" installation and prefer the latest settings from
+``package.py`` files and ``packages.yaml`` (``false``).
+You can use:
+
+.. code-block:: console
+
+   % spack install --reuse <spec>
+
+to enable reuse for a single installation, and you can use:
+
+.. code-block:: console
+
+   spack install --fresh <spec>
+
+to do a fresh install if ``reuse`` is enabled by default.
+``reuse: true`` is the default.
+
+------------------------------------------
+Selection of the target microarchitectures
+------------------------------------------
+
+The options under the ``targets`` attribute control which targets are considered during a solve.
+Currently the options in this section are only configurable from the ``concretizer.yaml`` file
+and there are no corresponding command line arguments to enable them for a single solve.
+
+The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``.
+If set to:
+
+.. code-block:: yaml
+
+   concretizer:
+     targets:
+       granularity: microarchitectures
+
+Spack will consider all the microarchitectures known to ``archspec`` to label nodes for
+compatibility. If instead the option is set to:
+
+.. code-block:: yaml
+
+   concretizer:
+     targets:
+       granularity: generic
+
+Spack will consider only generic microarchitectures. For instance, when running on an
+Haswell node, Spack will consider ``haswell`` as the best target in the former case and
+``x86_64_v3`` as the best target in the latter case.
+
+The ``host_compatible`` option is a Boolean option that determines whether or not the
+microarchitectures considered during the solve are constrained to be compatible with the
+host Spack is currently running on. For instance, if this option is set to ``true``, a
+user cannot concretize for ``target=icelake`` while running on an Haswell node.
+
+---------------
+Duplicate nodes
+---------------
+
+The ``duplicates`` attribute controls whether the DAG can contain multiple configurations of
+the same package. This is mainly relevant for build dependencies, which may have their version
+pinned by some nodes, and thus be required at different versions by different nodes in the same
+DAG.
+
+The ``strategy`` option controls how the solver deals with duplicates. If the value is ``none``,
+then a single configuration per package is allowed in the DAG. This means, for instance, that only
+a single ``cmake`` or a single ``py-setuptools`` version is allowed. The result would be a slightly
+faster concretization, at the expense of making a few specs unsolvable.
+
+If the value is ``minimal`` Spack will allow packages tagged as ``build-tools`` to have duplicates.
+This allows, for instance, to concretize specs whose nodes require different, and incompatible, ranges
+of some build tool. For instance, in the figure below the latest `py-shapely` requires a newer `py-setuptools`,
+while `py-numpy` still needs an older version:
+
+.. figure:: images/shapely_duplicates.svg
+   :scale: 70 %
+   :align: center
+
+Up to Spack v0.20 ``duplicates:strategy:none`` was the default (and only) behavior. From Spack v0.21 the
+default behavior is ``duplicates:strategy:minimal``.
+
 .. _build-settings:

 ================================
@@ -232,76 +329,6 @@ Specific limitations include:
 then Spack will not add a new external entry (``spack config blame packages``
 can help locate all external entries).

-.. _concretizer-options:
-
-----------------------
-Concretizer options
-----------------------
-
-``packages.yaml`` gives the concretizer preferences for specific packages,
-but you can also use ``concretizer.yaml`` to customize aspects of the
-algorithm it uses to select the dependencies you install:
-
-.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml
-   :language: yaml
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Reuse already installed packages
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
-whether it will do a "fresh" installation and prefer the latest settings from
-``package.py`` files and ``packages.yaml`` (``false``).
-You can use:
-
-.. code-block:: console
-
-   % spack install --reuse <spec>
-
-to enable reuse for a single installation, and you can use:
-
-.. code-block:: console
-
-   spack install --fresh <spec>
-
-to do a fresh install if ``reuse`` is enabled by default.
-``reuse: true`` is the default.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Selection of the target microarchitectures
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-The options under the ``targets`` attribute control which targets are considered during a solve.
-Currently the options in this section are only configurable from the ``concretizer.yaml`` file
-and there are no corresponding command line arguments to enable them for a single solve.
-
-The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``.
-If set to:
-
-.. code-block:: yaml
-
-   concretizer:
-     targets:
-       granularity: microarchitectures
-
-Spack will consider all the microarchitectures known to ``archspec`` to label nodes for
-compatibility. If instead the option is set to:
-
-.. code-block:: yaml
-
-   concretizer:
-     targets:
-       granularity: generic
-
-Spack will consider only generic microarchitectures. For instance, when running on an
-Haswell node, Spack will consider ``haswell`` as the best target in the former case and
-``x86_64_v3`` as the best target in the latter case.
-
-The ``host_compatible`` option is a Boolean option that determines whether or not the
-microarchitectures considered during the solve are constrained to be compatible with the
-host Spack is currently running on. For instance, if this option is set to ``true``, a
-user cannot concretize for ``target=icelake`` while running on an Haswell node.
-
 .. _package-requirements:

 --------------------
@@ -127,9 +127,9 @@ check out a commit from the ``master`` branch, you would want to add:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
depends_on('autoconf', type='build', when='@master')
|
||||
depends_on('automake', type='build', when='@master')
|
||||
depends_on('libtool', type='build', when='@master')
|
||||
depends_on("autoconf", type="build", when="@master")
|
||||
depends_on("automake", type="build", when="@master")
|
||||
depends_on("libtool", type="build", when="@master")
|
||||
|
||||
It is typically redundant to list the ``m4`` macro processor package as a
|
||||
dependency, since ``autoconf`` already depends on it.
|
||||
@@ -145,7 +145,7 @@ example, the ``bash`` shell is used to run the ``autogen.sh`` script.
|
||||
.. code-block:: python
|
||||
|
||||
def autoreconf(self, spec, prefix):
|
||||
which('bash')('autogen.sh')
|
||||
which("bash")("autogen.sh")
|
||||
|
||||
"""""""""""""""""""""""""""""""""""""""
|
||||
patching configure or Makefile.in files
|
||||
@@ -186,9 +186,9 @@ To opt out of this feature, use the following setting:
|
||||
To enable it conditionally on different architectures, define a property and
|
||||
make the package depend on ``gnuconfig`` as a build dependency:
|
||||
|
||||
.. code-block
|
||||
.. code-block:: python
|
||||
|
||||
depends_on('gnuconfig', when='@1.0:')
|
||||
depends_on("gnuconfig", when="@1.0:")
|
||||
|
||||
@property
|
||||
def patch_config_files(self):
|
||||
@@ -230,7 +230,7 @@ version, this can be done like so:
|
||||
|
||||
@property
|
||||
def force_autoreconf(self):
|
||||
return self.version == Version('1.2.3')
|
||||
return self.version == Version("1.2.3")
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Finding configure flags
|
||||
@@ -278,13 +278,22 @@ function like so:
|
||||
def configure_args(self):
|
||||
args = []
|
||||
|
||||
if '+mpi' in self.spec:
|
||||
args.append('--enable-mpi')
|
||||
if self.spec.satisfies("+mpi"):
|
||||
args.append("--enable-mpi")
|
||||
else:
|
||||
args.append('--disable-mpi')
|
||||
args.append("--disable-mpi")
|
||||
|
||||
return args
|
||||
|
||||
|
||||
Alternatively, you can use the :ref:`enable_or_disable <autotools_enable_or_disable>` helper:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def configure_args(self):
|
||||
return [self.enable_or_disable("mpi")]
|
||||
|
||||
|
||||
Note that we are explicitly disabling MPI support if it is not
|
||||
requested. This is important, as many Autotools packages will enable
|
||||
options by default if the dependencies are found, and disable them
|
||||
@@ -295,9 +304,11 @@ and `here <https://wiki.gentoo.org/wiki/Project:Quality_Assurance/Automagic_depe
|
||||
for a rationale as to why these so-called "automagic" dependencies
|
||||
are a problem.
|
||||
|
||||
By default, Autotools installs packages to ``/usr``. We don't want this,
|
||||
so Spack automatically adds ``--prefix=/path/to/installation/prefix``
|
||||
to your list of ``configure_args``. You don't need to add this yourself.
|
||||
.. note::
|
||||
|
||||
By default, Autotools installs packages to ``/usr``. We don't want this,
|
||||
so Spack automatically adds ``--prefix=/path/to/installation/prefix``
|
||||
to your list of ``configure_args``. You don't need to add this yourself.
|
||||
|
||||
^^^^^^^^^^^^^^^^
|
||||
Helper functions
|
||||
@@ -308,6 +319,8 @@ You may have noticed that most of the Autotools flags are of the form
|
||||
``--without-baz``. Since these flags are so common, Spack provides a
|
||||
couple of helper functions to make your life easier.
|
||||
|
||||
.. _autotools_enable_or_disable:
|
||||
|
||||
"""""""""""""""""
|
||||
enable_or_disable
|
||||
"""""""""""""""""
|
||||
@@ -319,11 +332,11 @@ typically used to enable or disable some feature within the package.
|
||||
.. code-block:: python
|
||||
|
||||
variant(
|
||||
'memchecker',
|
||||
"memchecker",
|
||||
default=False,
|
||||
description='Memchecker support for debugging [degrades performance]'
|
||||
description="Memchecker support for debugging [degrades performance]"
|
||||
)
|
||||
config_args.extend(self.enable_or_disable('memchecker'))
|
||||
config_args.extend(self.enable_or_disable("memchecker"))
|
||||
|
||||
In this example, specifying the variant ``+memchecker`` will generate
|
||||
the following configuration options:
|
||||
@@ -343,15 +356,15 @@ the ``with_or_without`` method.
|
||||
.. code-block:: python
|
||||
|
||||
variant(
|
||||
'schedulers',
|
||||
"schedulers",
|
||||
values=disjoint_sets(
|
||||
('auto',), ('alps', 'lsf', 'tm', 'slurm', 'sge', 'loadleveler')
|
||||
).with_non_feature_values('auto', 'none'),
|
||||
("auto",), ("alps", "lsf", "tm", "slurm", "sge", "loadleveler")
|
||||
).with_non_feature_values("auto", "none"),
|
||||
description="List of schedulers for which support is enabled; "
|
||||
"'auto' lets openmpi determine",
|
||||
)
|
||||
if 'schedulers=auto' not in spec:
|
||||
config_args.extend(self.with_or_without('schedulers'))
|
||||
if not spec.satisfies("schedulers=auto"):
|
||||
config_args.extend(self.with_or_without("schedulers"))
|
||||
|
||||
In this example, specifying the variant ``schedulers=slurm,sge`` will
|
||||
generate the following configuration options:
|
||||
@@ -376,16 +389,16 @@ generated, using the ``activation_value`` argument to
|
||||
.. code-block:: python
|
||||
|
||||
variant(
|
||||
'fabrics',
|
||||
"fabrics",
|
||||
values=disjoint_sets(
|
||||
('auto',), ('psm', 'psm2', 'verbs', 'mxm', 'ucx', 'libfabric')
|
||||
).with_non_feature_values('auto', 'none'),
|
||||
("auto",), ("psm", "psm2", "verbs", "mxm", "ucx", "libfabric")
|
||||
).with_non_feature_values("auto", "none"),
|
||||
description="List of fabrics that are enabled; "
|
||||
"'auto' lets openmpi determine",
|
||||
)
|
||||
if 'fabrics=auto' not in spec:
|
||||
config_args.extend(self.with_or_without('fabrics',
|
||||
activation_value='prefix'))
|
||||
if not spec.satisfies("fabrics=auto"):
|
||||
config_args.extend(self.with_or_without("fabrics",
|
||||
activation_value="prefix"))
|
||||
|
||||
``activation_value`` accepts a callable that generates the configure
|
||||
parameter value given the variant value; but the special value
|
||||
@@ -409,16 +422,16 @@ When Spack variants and configure flags do not correspond one-to-one, the
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant('debug_tools', default=False)
|
||||
config_args += self.enable_or_disable('debug-tools', variant='debug_tools')
|
||||
variant("debug_tools", default=False)
|
||||
config_args += self.enable_or_disable("debug-tools", variant="debug_tools")
|
||||
|
||||
Or when one variant controls multiple flags:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant('debug_tools', default=False)
|
||||
config_args += self.with_or_without('memchecker', variant='debug_tools')
|
||||
config_args += self.with_or_without('profiler', variant='debug_tools')
|
||||
variant("debug_tools", default=False)
|
||||
config_args += self.with_or_without("memchecker", variant="debug_tools")
|
||||
config_args += self.with_or_without("profiler", variant="debug_tools")
|
||||
|
||||
|
||||
""""""""""""""""""""
|
||||
@@ -432,8 +445,8 @@ For example:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant('profiler', when='@2.0:')
|
||||
config_args += self.with_or_without('profiler')
|
||||
variant("profiler", when="@2.0:")
|
||||
config_args += self.with_or_without("profiler")
|
||||
|
||||
will neither add ``--with-profiler`` nor ``--without-profiler`` when the version is
|
||||
below ``2.0``.
|
||||
@@ -452,10 +465,10 @@ the variant values require atypical behavior.
|
||||
def with_or_without_verbs(self, activated):
|
||||
# Up through version 1.6, this option was named --with-openib.
|
||||
# In version 1.7, it was renamed to be --with-verbs.
|
||||
opt = 'verbs' if self.spec.satisfies('@1.7:') else 'openib'
|
||||
opt = "verbs" if self.spec.satisfies("@1.7:") else "openib"
|
||||
if not activated:
|
||||
return '--without-{0}'.format(opt)
|
||||
return '--with-{0}={1}'.format(opt, self.spec['rdma-core'].prefix)
|
||||
return f"--without-{opt}"
|
||||
return f"--with-{opt}={self.spec['rdma-core'].prefix}"
|
||||
|
||||
Defining ``with_or_without_verbs`` overrides the behavior of a
|
||||
``fabrics=verbs`` variant, changing the configure-time option to
|
||||
@@ -479,7 +492,7 @@ do this like so:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
configure_directory = 'src'
|
||||
configure_directory = "src"
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^
Building out of source
@@ -491,7 +504,7 @@ This can be done using the ``build_directory`` variable:
 
 .. code-block:: python
 
-   build_directory = 'spack-build'
+   build_directory = "spack-build"
By default, Spack will build the package in the same directory that
contains the ``configure`` script
@@ -514,8 +527,8 @@ library or build the documentation, you can add these like so:
 
 .. code-block:: python
 
-   build_targets = ['all', 'docs']
-   install_targets = ['install', 'docs']
+   build_targets = ["all", "docs"]
+   install_targets = ["install", "docs"]
 
 ^^^^^^^
 Testing
@@ -87,7 +87,7 @@ A typical usage of these methods may look something like this:
 
 .. code-block:: python
 
-   def initconfig_mpi_entries(self)
+   def initconfig_mpi_entries(self):
        # Get existing MPI configurations
        entries = super(Foo, self).initconfig_mpi_entries()
 
@@ -95,25 +95,25 @@ A typical usage of these methods may look something like this:
        # This spec has an MPI variant, and we need to enable MPI when it is on.
        # This hypothetical package controls MPI with the ``FOO_MPI`` option to
        # cmake.
-       if '+mpi' in self.spec:
-           entries.append(cmake_cache_option('FOO_MPI', True, "enable mpi"))
+       if self.spec.satisfies("+mpi"):
+           entries.append(cmake_cache_option("FOO_MPI", True, "enable mpi"))
        else:
-           entries.append(cmake_cache_option('FOO_MPI', False, "disable mpi"))
+           entries.append(cmake_cache_option("FOO_MPI", False, "disable mpi"))
 
    def initconfig_package_entries(self):
        # Package specific options
        entries = []
 
-       entries.append('#Entries for build options')
+       entries.append("#Entries for build options")
 
-       bar_on = '+bar' in self.spec
-       entries.append(cmake_cache_option('FOO_BAR', bar_on, 'toggle bar'))
+       bar_on = self.spec.satisfies("+bar")
+       entries.append(cmake_cache_option("FOO_BAR", bar_on, "toggle bar"))
 
-       entries.append('#Entries for dependencies')
+       entries.append("#Entries for dependencies")
 
-       if self.spec['blas'].name == 'baz':  # baz is our blas provider
-           entries.append(cmake_cache_string('FOO_BLAS', 'baz', 'Use baz'))
-           entries.append(cmake_cache_path('BAZ_PREFIX', self.spec['baz'].prefix))
+       if self.spec["blas"].name == "baz":  # baz is our blas provider
+           entries.append(cmake_cache_string("FOO_BLAS", "baz", "Use baz"))
+           entries.append(cmake_cache_path("BAZ_PREFIX", self.spec["baz"].prefix))
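For orientation, each ``cmake_cache_*`` helper renders a single cache-``set``
line in the generated initial-config file. A rough sketch of the idea (not
Spack's actual implementation):

.. code-block:: python

   def cmake_cache_option(name, boolean_value, comment=""):
       # Renders e.g. cmake_cache_option("FOO_MPI", True, "enable mpi")
       # as: set(FOO_MPI ON CACHE BOOL "enable mpi")
       value = "ON" if boolean_value else "OFF"
       return f'set({name} {value} CACHE BOOL "{comment}")'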
^^^^^^^^^^^^^^^^^^^^^^
External documentation
@@ -54,8 +54,8 @@ to terminate such build attempts with a suitable message:
 
 .. code-block:: python
 
-   conflicts('cuda_arch=none', when='+cuda',
-             msg='CUDA architecture is required')
+   conflicts("cuda_arch=none", when="+cuda",
+             msg="CUDA architecture is required")
 
 Similarly, if your software does not support all versions of the property,
 you could add ``conflicts`` to your package for those versions. For example,
@@ -66,13 +66,13 @@ custom message should a user attempt such a build:
 
 .. code-block:: python
 
    unsupported_cuda_archs = [
-       '10', '11', '12', '13',
-       '20', '21',
-       '30', '32', '35', '37'
+       "10", "11", "12", "13",
+       "20", "21",
+       "30", "32", "35", "37"
    ]
    for value in unsupported_cuda_archs:
-       conflicts('cuda_arch={0}'.format(value), when='+cuda',
-                 msg='CUDA architecture {0} is not supported'.format(value))
+       conflicts(f"cuda_arch={value}", when="+cuda",
+                 msg=f"CUDA architecture {value} is not supported")
^^^^^^^
Methods
@@ -107,16 +107,16 @@ class of your package. For example, you can add it to your
 
        spec = self.spec
        args = []
        ...
-       if '+cuda' in spec:
+       if spec.satisfies("+cuda"):
            # Set up the cuda macros needed by the build
-           args.append('-DWITH_CUDA=ON')
-           cuda_arch_list = spec.variants['cuda_arch'].value
+           args.append("-DWITH_CUDA=ON")
+           cuda_arch_list = spec.variants["cuda_arch"].value
            cuda_arch = cuda_arch_list[0]
-           if cuda_arch != 'none':
-               args.append('-DCUDA_FLAGS=-arch=sm_{0}'.format(cuda_arch))
+           if cuda_arch != "none":
+               args.append(f"-DCUDA_FLAGS=-arch=sm_{cuda_arch}")
        else:
            # Ensure build with cuda is disabled
-           args.append('-DWITH_CUDA=OFF')
+           args.append("-DWITH_CUDA=OFF")
        ...
        return args
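Note that the example above forwards only the first entry of ``cuda_arch``.
If a package supports fat binaries, a hedged variation (the ``-gencode``
clauses are standard ``nvcc`` syntax, not taken from this diff) could cover
every requested architecture instead:

.. code-block:: python

   if "none" not in cuda_arch_list:
       # One -gencode clause per requested compute capability.
       flags = " ".join(
           f"-gencode arch=compute_{arch},code=sm_{arch}"
           for arch in cuda_arch_list
       )
       args.append(f"-DCUDA_FLAGS={flags}")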
@@ -125,7 +125,7 @@ You will need to customize options as needed for your build.
 
 This example also illustrates how to check for the ``cuda`` variant using
 ``self.spec`` and how to retrieve the ``cuda_arch`` variant's value, which
-is a list, using ``self.spec.variants['cuda_arch'].value``.
+is a list, using ``self.spec.variants["cuda_arch"].value``.
 
 With over 70 packages using ``CudaPackage`` as of January 2021 there are
 lots of examples to choose from to get more ideas for using this package.
@@ -57,13 +57,13 @@ If you look at the ``perl`` package, you'll see:
 
 .. code-block:: python
 
-   phases = ['configure', 'build', 'install']
+   phases = ["configure", "build", "install"]
 
 Similarly, ``cmake`` defines:
 
 .. code-block:: python
 
-   phases = ['bootstrap', 'build', 'install']
+   phases = ["bootstrap", "build", "install"]
 
 If we look at the ``cmake`` example, this tells Spack's ``PackageBase``
 class to run the ``bootstrap``, ``build``, and ``install`` functions
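Putting the pieces together, a skeleton of such a package might look like the
following sketch (``Mypkg`` and its method bodies are hypothetical):

.. code-block:: python

   class Mypkg(Package):
       # Spack runs these methods in order, passing (spec, prefix) to each.
       phases = ["configure", "build", "install"]

       def configure(self, spec, prefix):
           pass  # invoke the project's own configure step here

       def build(self, spec, prefix):
           make()

       def install(self, spec, prefix):
           make("install")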
@@ -78,7 +78,7 @@ If we look at ``perl``, we see that it defines a ``configure`` method:
 
 .. code-block:: python
 
    def configure(self, spec, prefix):
-       configure = Executable('./Configure')
+       configure = Executable("./Configure")
        configure(*self.configure_args())
 
 There is also a corresponding ``configure_args`` function that handles
@@ -92,7 +92,7 @@ phases are pretty simple:
 
        make()
 
    def install(self, spec, prefix):
-       make('install')
+       make("install")
 
 The ``cmake`` package looks very similar, but with a ``bootstrap``
 function instead of ``configure``:
@@ -100,14 +100,14 @@ function instead of ``configure``:
 
 .. code-block:: python
 
    def bootstrap(self, spec, prefix):
-       bootstrap = Executable('./bootstrap')
+       bootstrap = Executable("./bootstrap")
        bootstrap(*self.bootstrap_args())
 
    def build(self, spec, prefix):
        make()
 
    def install(self, spec, prefix):
-       make('install')
+       make("install")
 
 Again, there is a ``bootstrap_args`` function that determines the
 correct bootstrap flags to use.
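A hedged sketch of what such a helper can return (the flags below are purely
illustrative; the real ``cmake`` package computes more than this):

.. code-block:: python

   def bootstrap_args(self):
       # Flags handed to ./bootstrap above; illustrative values only.
       return [f"--prefix={self.prefix}", "--parallel=8"]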
@@ -128,16 +128,16 @@ before or after a particular phase. For example, in ``perl``, we see:
 
 .. code-block:: python
 
-   @run_after('install')
+   @run_after("install")
    def install_cpanm(self):
        spec = self.spec
 
-       if '+cpanm' in spec:
-           with working_dir(join_path('cpanm', 'cpanm')):
-               perl = spec['perl'].command
-               perl('Makefile.PL')
+       if spec.satisfies("+cpanm"):
+           with working_dir(join_path("cpanm", "cpanm")):
+               perl = spec["perl"].command
+               perl("Makefile.PL")
                make()
-               make('install')
+               make("install")
 
 This extra step automatically installs ``cpanm`` in addition to the
 base Perl installation.
@@ -174,10 +174,10 @@ In the ``perl`` package, we can see:
 
 .. code-block:: python
 
-   @run_after('build')
+   @run_after("build")
    @on_package_attributes(run_tests=True)
    def test(self):
-       make('test')
+       make("test")
 
 As you can guess, this runs ``make test`` *after* building the package,
 if and only if testing is requested. Again, this is not specific to
@@ -189,7 +189,7 @@ custom build systems, it can be added to existing build systems as well.
 
 .. code-block:: python
 
-   @run_after('install')
+   @run_after("install")
    @on_package_attributes(run_tests=True)
 
 works as expected. However, if you reverse the ordering:
@@ -197,7 +197,7 @@ custom build systems, it can be added to existing build systems as well.
 
 .. code-block:: python
 
    @on_package_attributes(run_tests=True)
-   @run_after('install')
+   @run_after("install")
 
 the tests will always be run regardless of whether or not
 ``--test=root`` is requested. See https://github.com/spack/spack/issues/3833
@@ -25,8 +25,8 @@ use Spack to build packages with the tools.
 
 The Spack Python class ``IntelOneapiPackage`` is a base class that is
 used by ``IntelOneapiCompilers``, ``IntelOneapiMkl``,
 ``IntelOneapiTbb`` and other classes to implement the oneAPI
-packages. See the :ref:`package-list` for the full list of available
-oneAPI packages or use::
+packages. Search for ``oneAPI`` at `<packages.spack.io>`_ for the full
+list of available oneAPI packages, or use::
 
    spack list -d oneAPI
@@ -59,7 +59,7 @@ using GNU Make, you should add a dependency on ``gmake``:
 
 .. code-block:: python
 
-   depends_on('gmake', type='build')
+   depends_on("gmake", type="build")
 
 ^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -93,8 +93,8 @@ there are any other variables you need to set, you can do this in the
 
 .. code-block:: python
 
    def edit(self, spec, prefix):
-       env['PREFIX'] = prefix
-       env['BLASLIB'] = spec['blas'].libs.ld_flags
+       env["PREFIX"] = prefix
+       env["BLASLIB"] = spec["blas"].libs.ld_flags
 
 
 `cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cbench/package.py>`_
@@ -113,7 +113,7 @@ you can do this like so:
 
 .. code-block:: python
 
-   build_targets = ['CC=cc']
+   build_targets = ["CC=cc"]
 
 
 If you do need access to the spec, you can create a property like so:
@@ -125,8 +125,8 @@ If you do need access to the spec, you can create a property like so:
 
        spec = self.spec
 
        return [
-           'CC=cc',
-           'BLASLIB={0}'.format(spec['blas'].libs.ld_flags),
+           "CC=cc",
+           f"BLASLIB={spec['blas'].libs.ld_flags}",
        ]
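For context, the fragment above is the body of a ``build_targets`` property;
a hedged reconstruction of the full definition:

.. code-block:: python

   @property
   def build_targets(self):
       spec = self.spec

       return [
           "CC=cc",
           f"BLASLIB={spec['blas'].libs.ld_flags}",
       ]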
@@ -145,12 +145,12 @@ and a ``filter_file`` method to help with this. For example:
 
 .. code-block:: python
 
    def edit(self, spec, prefix):
-       makefile = FileFilter('Makefile')
+       makefile = FileFilter("Makefile")
 
-       makefile.filter(r'^\s*CC\s*=.*', 'CC = ' + spack_cc)
-       makefile.filter(r'^\s*CXX\s*=.*', 'CXX = ' + spack_cxx)
-       makefile.filter(r'^\s*F77\s*=.*', 'F77 = ' + spack_f77)
-       makefile.filter(r'^\s*FC\s*=.*', 'FC = ' + spack_fc)
+       makefile.filter(r"^\s*CC\s*=.*", f"CC = {spack_cc}")
+       makefile.filter(r"^\s*CXX\s*=.*", f"CXX = {spack_cxx}")
+       makefile.filter(r"^\s*F77\s*=.*", f"F77 = {spack_f77}")
+       makefile.filter(r"^\s*FC\s*=.*", f"FC = {spack_fc}")
 
 
 `stream <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/stream/package.py>`_
@@ -181,16 +181,16 @@ well for storing variables:
 
    def edit(self, spec, prefix):
        config = {
-           'CC': 'cc',
-           'MAKE': 'make',
+           "CC": "cc",
+           "MAKE": "make",
        }
 
-       if '+blas' in spec:
-           config['BLAS_LIBS'] = spec['blas'].libs.joined()
+       if spec.satisfies("+blas"):
+           config["BLAS_LIBS"] = spec["blas"].libs.joined()
 
-       with open('make.inc', 'w') as inc:
+       with open("make.inc", "w") as inc:
            for key in config:
-               inc.write('{0} = {1}\n'.format(key, config[key]))
+               inc.write(f"{key} = {config[key]}\n")
 
 
 `elk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/elk/package.py>`_
@@ -204,14 +204,14 @@ them in a list:
 
    def edit(self, spec, prefix):
        config = [
-           'INSTALL_DIR = {0}'.format(prefix),
-           'INCLUDE_DIR = $(INSTALL_DIR)/include',
-           'LIBRARY_DIR = $(INSTALL_DIR)/lib',
+           f"INSTALL_DIR = {prefix}",
+           "INCLUDE_DIR = $(INSTALL_DIR)/include",
+           "LIBRARY_DIR = $(INSTALL_DIR)/lib",
        ]
 
-       with open('make.inc', 'w') as inc:
+       with open("make.inc", "w") as inc:
            for var in config:
-               inc.write('{0}\n'.format(var))
+               inc.write(f"{var}\n")
 
 
 `hpl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/hpl/package.py>`_
@@ -284,7 +284,7 @@ can tell Spack where to locate it like so:
 
 .. code-block:: python
 
-   build_directory = 'src'
+   build_directory = "src"
 
 
 ^^^^^^^^^^^^^^^^^^^
@@ -299,8 +299,8 @@ install the package:
 
    def install(self, spec, prefix):
        mkdir(prefix.bin)
-       install('foo', prefix.bin)
-       install_tree('lib', prefix.lib)
+       install("foo", prefix.bin)
+       install_tree("lib", prefix.lib)
 
 
 ^^^^^^^^^^^^^^^^^^^^^^
@@ -152,16 +152,16 @@ set. Once set, ``pypi`` will be used to define the ``homepage``,
 
 .. code-block:: python
 
-   homepage = 'https://pypi.org/project/setuptools/'
-   url = 'https://pypi.org/packages/source/s/setuptools/setuptools-49.2.0.zip'
-   list_url = 'https://pypi.org/simple/setuptools/'
+   homepage = "https://pypi.org/project/setuptools/"
+   url = "https://pypi.org/packages/source/s/setuptools/setuptools-49.2.0.zip"
+   list_url = "https://pypi.org/simple/setuptools/"
 
 
 is equivalent to:
 
 .. code-block:: python
 
-   pypi = 'setuptools/setuptools-49.2.0.zip'
+   pypi = "setuptools/setuptools-49.2.0.zip"
If a package has a different homepage listed on PyPI, you can
@@ -208,7 +208,7 @@ dependencies to your package:
 
 .. code-block:: python
 
-   depends_on('py-setuptools@42:', type='build')
+   depends_on("py-setuptools@42:", type="build")
 
 
 Note that ``py-wheel`` is already listed as a build dependency in the
@@ -232,7 +232,7 @@ Look for dependencies under the following keys:
 * ``dependencies`` under ``[project]``
 
   These packages are required for building and installation. You can
-  add them with ``type=('build', 'run')``.
+  add them with ``type=("build", "run")``.
 
 * ``[project.optional-dependencies]``
@@ -279,12 +279,12 @@ distutils library, and has almost the exact same API. In addition to
 * ``setup_requires``
 
   These packages are usually only needed at build-time, so you can
-  add them with ``type='build'``.
+  add them with ``type="build"``.
 
 * ``install_requires``
 
   These packages are required for building and installation. You can
-  add them with ``type=('build', 'run')``.
+  add them with ``type=("build", "run")``.
 
 * ``extras_require``
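As a concrete, hypothetical illustration, a ``setup.py`` entry such as
``install_requires=["requests>=2.0"]`` would typically map to:

.. code-block:: python

   # "requests>=2.0" from install_requires becomes a py- prefixed
   # Spack dependency needed at both build and run time.
   depends_on("py-requests@2.0:", type=("build", "run"))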
@@ -296,7 +296,7 @@ distutils library, and has almost the exact same API. In addition to
 
   These are packages that are required to run the unit tests for the
   package. These dependencies can be specified using the
-  ``type='test'`` dependency type. However, the PyPI tarballs rarely
+  ``type="test"`` dependency type. However, the PyPI tarballs rarely
   contain unit tests, so there is usually no reason to add these.
 
 See https://setuptools.pypa.io/en/latest/userguide/dependency_management.html
@@ -321,7 +321,7 @@ older versions of flit may use the following keys:
 
 * ``requires`` under ``[tool.flit.metadata]``
 
   These packages are required for building and installation. You can
-  add them with ``type=('build', 'run')``.
+  add them with ``type=("build", "run")``.
 
 * ``[tool.flit.metadata.requires-extra]``
@@ -434,12 +434,12 @@ the BLAS/LAPACK library you want pkg-config to search for:
 
 .. code-block:: python
 
-   depends_on('py-pip@22.1:', type='build')
+   depends_on("py-pip@22.1:", type="build")
 
    def config_settings(self, spec, prefix):
        return {
-           'blas': spec['blas'].libs.names[0],
-           'lapack': spec['lapack'].libs.names[0],
+           "blas": spec["blas"].libs.names[0],
+           "lapack": spec["lapack"].libs.names[0],
        }
@@ -463,10 +463,10 @@ has an optional dependency on ``libyaml`` that can be enabled like so:
 
    def global_options(self, spec, prefix):
        options = []
-       if '+libyaml' in spec:
-           options.append('--with-libyaml')
+       if spec.satisfies("+libyaml"):
+           options.append("--with-libyaml")
        else:
-           options.append('--without-libyaml')
+           options.append("--without-libyaml")
        return options
@@ -492,10 +492,10 @@ allows you to specify the directories to search for ``libyaml``:
 
    def install_options(self, spec, prefix):
        options = []
-       if '+libyaml' in spec:
+       if spec.satisfies("+libyaml"):
            options.extend([
-               spec['libyaml'].libs.search_flags,
-               spec['libyaml'].headers.include_flags,
+               spec["libyaml"].libs.search_flags,
+               spec["libyaml"].headers.include_flags,
            ])
        return options
@@ -556,7 +556,7 @@ detected are wrong, you can provide the names yourself by overriding
 
 .. code-block:: python
 
-   import_modules = ['six']
+   import_modules = ["six"]
 
 
 Sometimes the list of module names to import depends on how the
@@ -571,9 +571,9 @@ This can be expressed like so:
 
    @property
    def import_modules(self):
-       modules = ['yaml']
-       if '+libyaml' in self.spec:
-           modules.append('yaml.cyaml')
+       modules = ["yaml"]
+       if self.spec.satisfies("+libyaml"):
+           modules.append("yaml.cyaml")
        return modules
@@ -586,14 +586,14 @@ Instead of defining the ``import_modules`` explicitly, only the subset
 of module names to be skipped can be defined by using ``skip_modules``.
 If a defined module has submodules, they are skipped as well, e.g.,
 in case the ``plotting`` modules should be excluded from the
-automatically detected ``import_modules`` ``['nilearn', 'nilearn.surface',
-'nilearn.plotting', 'nilearn.plotting.data']`` set:
+automatically detected ``import_modules`` ``["nilearn", "nilearn.surface",
+"nilearn.plotting", "nilearn.plotting.data"]`` set:
 
 .. code-block:: python
 
-   skip_modules = ['nilearn.plotting']
+   skip_modules = ["nilearn.plotting"]
 
-This will set ``import_modules`` to ``['nilearn', 'nilearn.surface']``
+This will set ``import_modules`` to ``["nilearn", "nilearn.surface"]``
 
 Import tests can be run during the installation using ``spack install
 --test=root`` or at any time after the installation using
@@ -612,11 +612,11 @@ after the ``install`` phase:
 
 .. code-block:: python
 
-   @run_after('install')
+   @run_after("install")
    @on_package_attributes(run_tests=True)
    def install_test(self):
-       with working_dir('spack-test', create=True):
-           python('-c', 'import numpy; numpy.test("full", verbose=2)')
+       with working_dir("spack-test", create=True):
+           python("-c", "import numpy; numpy.test('full', verbose=2)")
 
 
 when testing is enabled during the installation (i.e., ``spack install
@@ -638,7 +638,7 @@ provides Python bindings in a ``python`` directory, you can use:
 
 .. code-block:: python
 
-   build_directory = 'python'
+   build_directory = "python"
 
 
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -81,28 +81,27 @@ class of your package. For example, you can add it to your
 
    class MyRocmPackage(CMakePackage, ROCmPackage):
        ...
        # Ensure +rocm and amdgpu_targets are passed to dependencies
-       depends_on('mydeppackage', when='+rocm')
+       depends_on("mydeppackage", when="+rocm")
        for val in ROCmPackage.amdgpu_targets:
-           depends_on('mydeppackage amdgpu_target={0}'.format(val),
-                      when='amdgpu_target={0}'.format(val))
+           depends_on(f"mydeppackage amdgpu_target={val}",
+                      when=f"amdgpu_target={val}")
        ...
 
        def cmake_args(self):
            spec = self.spec
            args = []
            ...
-           if '+rocm' in spec:
+           if spec.satisfies("+rocm"):
                # Set up the hip macros needed by the build
                args.extend([
-                   '-DENABLE_HIP=ON',
-                   '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix)])
-               rocm_archs = spec.variants['amdgpu_target'].value
-               if 'none' not in rocm_archs:
-                   args.append('-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'
-                               .format(",".join(rocm_archs)))
+                   "-DENABLE_HIP=ON",
+                   f"-DHIP_ROOT_DIR={spec['hip'].prefix}"])
+               rocm_archs = spec.variants["amdgpu_target"].value
+               if "none" not in rocm_archs:
+                   args.append(f"-DHIP_HIPCC_FLAGS=--amdgpu-target={','.join(rocm_archs)}")
            else:
                # Ensure build with hip is disabled
-               args.append('-DENABLE_HIP=OFF')
+               args.append("-DENABLE_HIP=OFF")
            ...
            return args
        ...
@@ -114,7 +113,7 @@ build.
 
 This example also illustrates how to check for the ``rocm`` variant using
 ``self.spec`` and how to retrieve the ``amdgpu_target`` variant's value
-using ``self.spec.variants['amdgpu_target'].value``.
+using ``self.spec.variants["amdgpu_target"].value``.
 
 All five packages using ``ROCmPackage`` as of January 2021 also use the
 :ref:`CudaPackage <cudapackage>`. So it is worth looking at those packages
@@ -57,7 +57,7 @@ overridden like so:
 
 .. code-block:: python
 
    def test(self):
-       scons('check')
+       scons("check")
 
 
 ^^^^^^^^^^^^^^^
@@ -88,7 +88,7 @@ base class already contains:
 
 .. code-block:: python
 
-   depends_on('scons', type='build')
+   depends_on("scons", type="build")
 
 
 If you want to specify a particular version requirement, you can override
@@ -96,7 +96,7 @@ this in your package:
 
 .. code-block:: python
 
-   depends_on('scons@2.3.0:', type='build')
+   depends_on("scons@2.3.0:", type="build")
 
 
 ^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -238,14 +238,14 @@ the package build phase. This is done by overriding ``build_args`` like so:
 
    def build_args(self, spec, prefix):
        args = [
-           'PREFIX={0}'.format(prefix),
-           'ZLIB={0}'.format(spec['zlib'].prefix),
+           f"PREFIX={prefix}",
+           f"ZLIB={spec['zlib'].prefix}",
        ]
 
-       if '+debug' in spec:
-           args.append('DEBUG=yes')
+       if spec.satisfies("+debug"):
+           args.append("DEBUG=yes")
        else:
-           args.append('DEBUG=no')
+           args.append("DEBUG=no")
 
        return args
@@ -275,8 +275,8 @@ environment variables. For example, cantera has the following option:
 
    * env_vars: [ string ]
        Environment variables to propagate through to SCons. Either the
        string "all" or a comma separated list of variable names, e.g.
-       'LD_LIBRARY_PATH,HOME'.
-       - default: 'LD_LIBRARY_PATH,PYTHONPATH'
+       "LD_LIBRARY_PATH,HOME".
+       - default: "LD_LIBRARY_PATH,PYTHONPATH"
 
 
 In the case of cantera, using ``env_vars=all`` allows us to use
@@ -48,9 +48,6 @@
 os.environ["COLIFY_SIZE"] = "25x120"
 os.environ["COLUMNS"] = "120"
 
-# Generate full package list if needed
-subprocess.call(["spack", "list", "--format=html", "--update=package_list.html"])
-
 # Generate a command index if an update is needed
 subprocess.call(
     [
@@ -207,6 +204,7 @@ def setup(sphinx):
     ("py:class", "clingo.Control"),
     ("py:class", "six.moves.urllib.parse.ParseResult"),
     ("py:class", "TextIO"),
+    ("py:class", "hashlib._Hash"),
     # Spack classes that are private and we don't want to expose
     ("py:class", "spack.provider_index._IndexBase"),
     ("py:class", "spack.repo._PrependFileLoader"),
@@ -212,18 +212,12 @@ under the ``container`` attribute of environments:
      final:
      - libgomp
 
-     # Extra instructions
-     extra_instructions:
-       final: |
-         RUN echo 'export PS1="\[$(tput bold)\]\[$(tput setaf 1)\][gromacs]\[$(tput setaf 2)\]\u\[$(tput sgr0)\]:\w $ "' >> ~/.bashrc
-
      # Labels for the image
      labels:
        app: "gromacs"
        mpi: "mpich"
 
-A detailed description of the options available can be found in the
-:ref:`container_config_options` section.
+A detailed description of the options available can be found in the :ref:`container_config_options` section.
 
-------------------
Setting Base Images
@@ -525,6 +519,13 @@ the example below:
 
    COPY data /share/myapp/data
    {% endblock %}
 
+The Dockerfile is generated by running:
+
+.. code-block:: console
+
+   $ spack -e /opt/environment containerize
+
+Note that the environment must be active for spack to read the template.
 The recipe that gets generated contains the two extra instructions that we added in our template extension:
 
 .. code-block:: Dockerfile
@@ -310,53 +310,11 @@ Once all of the dependencies are installed, you can try building the documentation:
 
    $ make clean
    $ make
 
-If you see any warning or error messages, you will have to correct those before
-your PR is accepted.
-
-If you are editing the documentation, you should obviously be running the
-documentation tests. But even if you are simply adding a new package, your
-changes could cause the documentation tests to fail:
-
-.. code-block:: console
-
-   package_list.rst:8745: WARNING: Block quote ends without a blank line; unexpected unindent.
-
-At first, this error message will mean nothing to you, since you didn't edit
-that file. Until you look at line 8745 of the file in question:
-
-.. code-block:: rst
-
-   Description:
-      NetCDF is a set of software libraries and self-describing, machine-
-      independent data formats that support the creation, access, and sharing
-      of array-oriented scientific data.
-
-Our documentation includes :ref:`a list of all Spack packages <package-list>`.
-If you add a new package, its docstring is added to this page. The problem in
-this case was that the docstring looked like:
-
-.. code-block:: python
-
-   class Netcdf(Package):
-       """
-       NetCDF is a set of software libraries and self-describing,
-       machine-independent data formats that support the creation,
-       access, and sharing of array-oriented scientific data.
-       """
-
-Docstrings cannot start with a newline character, or else Sphinx will complain.
-Instead, they should look like:
-
-.. code-block:: python
-
-   class Netcdf(Package):
-       """NetCDF is a set of software libraries and self-describing,
-       machine-independent data formats that support the creation,
-       access, and sharing of array-oriented scientific data."""
-
-Documentation changes can result in much more obfuscated warning messages.
-If you don't understand what they mean, feel free to ask when you submit
-your PR.
+If you see any warning or error messages, you will have to correct those before your PR
+is accepted. If you are editing the documentation, you should be running the
+documentation tests to make sure there are no errors. Documentation changes can result
+in some obfuscated warning messages. If you don't understand what they mean, feel free
+to ask when you submit your PR.
 
 --------
 Coverage
BIN lib/spack/docs/images/setup_env.png (new file; binary file not shown. After: 296 KiB)
lib/spack/docs/images/shapely_duplicates.svg (new file, 2784 lines; file diff suppressed because it is too large. After: 108 KiB)
lib/spack/docs/images/strumpack_virtuals.svg (new file, 534 lines)
@@ -0,0 +1,534 @@
+[SVG source omitted: a Graphviz-rendered dependency graph for strumpack@7.0.1%gcc@9.4.0. Nodes include netlib-scalapack, openblas, intel-parallel-studio, butterflypack, slate, zfp, parmetis, metis, cmake, perl, berkeley-db, gdbm, bzip2, diffutils, patchelf, and zlib; edges are annotated with the virtuals each dependency provides, e.g. virtuals=blas,lapack, virtuals=mpi, and virtuals=scalapack.]
<!-- o524gebsxavobkte3k5fglgwnedfkadf->ywrpvv2hgooeepdke33exkqrtdpd5gkl -->
|
||||
<g id="edge11" class="edge">
|
||||
<title>o524gebsxavobkte3k5fglgwnedfkadf->ywrpvv2hgooeepdke33exkqrtdpd5gkl</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M690.0981,-952.705C684.8522,-895.2533 675.6173,-794.1153 669.9514,-732.0637"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="673.4345,-731.7184 669.0396,-722.0781 666.4635,-732.355 673.4345,-731.7184"/>
|
||||
</g>
|
||||
<!-- 4vsmjofkhntilgzh4zebluqak5mdsu3x -->
|
||||
<g id="node9" class="node">
|
||||
<title>4vsmjofkhntilgzh4zebluqak5mdsu3x</title>
|
||||
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M1977.9121,-721.9002C1977.9121,-721.9002 1386.2271,-721.9002 1386.2271,-721.9002 1380.2271,-721.9002 1374.2271,-715.9002 1374.2271,-709.9002 1374.2271,-709.9002 1374.2271,-647.2998 1374.2271,-647.2998 1374.2271,-641.2998 1380.2271,-635.2998 1386.2271,-635.2998 1386.2271,-635.2998 1977.9121,-635.2998 1977.9121,-635.2998 1983.9121,-635.2998 1989.9121,-641.2998 1989.9121,-647.2998 1989.9121,-647.2998 1989.9121,-709.9002 1989.9121,-709.9002 1989.9121,-715.9002 1983.9121,-721.9002 1977.9121,-721.9002"/>
|
||||
<text text-anchor="middle" x="1682.0696" y="-671.4" font-family="Monaco" font-size="24.00" fill="#000000">ca-certificates-mozilla@2023-01-10%gcc@9.4.0/4vsmjof</text>
|
||||
</g>
|
||||
<!-- xiro2z6na56qdd4czjhj54eag3ekbiow -->
|
||||
<g id="node10" class="node">
|
||||
<title>xiro2z6na56qdd4czjhj54eag3ekbiow</title>
|
||||
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M988.1824,-1357.1002C988.1824,-1357.1002 533.9568,-1357.1002 533.9568,-1357.1002 527.9568,-1357.1002 521.9568,-1351.1002 521.9568,-1345.1002 521.9568,-1345.1002 521.9568,-1282.4998 521.9568,-1282.4998 521.9568,-1276.4998 527.9568,-1270.4998 533.9568,-1270.4998 533.9568,-1270.4998 988.1824,-1270.4998 988.1824,-1270.4998 994.1824,-1270.4998 1000.1824,-1276.4998 1000.1824,-1282.4998 1000.1824,-1282.4998 1000.1824,-1345.1002 1000.1824,-1345.1002 1000.1824,-1351.1002 994.1824,-1357.1002 988.1824,-1357.1002"/>
|
||||
<text text-anchor="middle" x="761.0696" y="-1306.6" font-family="Monaco" font-size="24.00" fill="#000000">lapackpp@2022.07.00%gcc@9.4.0/xiro2z6</text>
|
||||
</g>
|
||||
<!-- xiro2z6na56qdd4czjhj54eag3ekbiow->mujlx42xgttdc6u6rmiftsktpsrcmpbs -->
|
||||
<g id="edge37" class="edge">
|
||||
<title>xiro2z6na56qdd4czjhj54eag3ekbiow->mujlx42xgttdc6u6rmiftsktpsrcmpbs</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M741.8402,-1270.7959C733.6789,-1251.4525 723.9915,-1228.4917 715.4149,-1208.1641"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M743.6829,-1270.0185C735.5216,-1250.675 725.8342,-1227.7143 717.2576,-1207.3866"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="719.4676,-1206.1933 712.3555,-1198.3403 713.0181,-1208.9144 719.4676,-1206.1933"/>
|
||||
</g>
|
||||
<!-- xiro2z6na56qdd4czjhj54eag3ekbiow->o524gebsxavobkte3k5fglgwnedfkadf -->
|
||||
<g id="edge35" class="edge">
|
||||
<title>xiro2z6na56qdd4czjhj54eag3ekbiow->o524gebsxavobkte3k5fglgwnedfkadf</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M597.2326,-1271.3826C534.1471,-1251.0571 472.8527,-1225.5904 454.2471,-1198.9688 432.1275,-1166.6075 433.5639,-1144.2113 454.2226,-1111.0684 472.6194,-1081.8657 500.3255,-1060.004 530.6572,-1043.4601"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M597.8458,-1269.4789C534.9144,-1249.2102 473.6201,-1223.7435 455.8921,-1197.8312 434.1234,-1166.7355 435.5598,-1144.3393 455.9166,-1112.1316 473.8583,-1083.4358 501.5644,-1061.5741 531.6142,-1045.2163"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="532.9062,-1047.362 540.1422,-1039.6231 529.6595,-1041.1605 532.9062,-1047.362"/>
|
||||
<text text-anchor="middle" x="474.3109" y="-1250.2598" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=blas,lapack</text>
|
||||
</g>
|
||||
<!-- xiro2z6na56qdd4czjhj54eag3ekbiow->gguve5icmo5e4cw5o3hvvfsxremc46if -->
|
||||
<g id="edge45" class="edge">
|
||||
<title>xiro2z6na56qdd4czjhj54eag3ekbiow->gguve5icmo5e4cw5o3hvvfsxremc46if</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M833.5823,-1270.3956C865.3249,-1250.0918 902.2709,-1224.6296 933.0696,-1198.4 973.2414,-1164.1878 969.8532,-1140.395 1014.0696,-1111.6 1058.5051,-1082.6623 1111.0286,-1060.0733 1161.029,-1042.8573"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1162.313,-1046.1177 1170.6621,-1039.5953 1160.0678,-1039.4876 1162.313,-1046.1177"/>
|
||||
</g>
|
||||
<!-- j5rupoqliu7kasm6xndl7ui32wgawkru -->
|
||||
<g id="node11" class="node">
|
||||
<title>j5rupoqliu7kasm6xndl7ui32wgawkru</title>
|
||||
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M1527.3625,-245.5002C1527.3625,-245.5002 1164.7767,-245.5002 1164.7767,-245.5002 1158.7767,-245.5002 1152.7767,-239.5002 1152.7767,-233.5002 1152.7767,-233.5002 1152.7767,-170.8998 1152.7767,-170.8998 1152.7767,-164.8998 1158.7767,-158.8998 1164.7767,-158.8998 1164.7767,-158.8998 1527.3625,-158.8998 1527.3625,-158.8998 1533.3625,-158.8998 1539.3625,-164.8998 1539.3625,-170.8998 1539.3625,-170.8998 1539.3625,-233.5002 1539.3625,-233.5002 1539.3625,-239.5002 1533.3625,-245.5002 1527.3625,-245.5002"/>
|
||||
<text text-anchor="middle" x="1346.0696" y="-195" font-family="Monaco" font-size="24.00" fill="#000000">ncurses@6.4%gcc@9.4.0/j5rupoq</text>
|
||||
</g>
|
||||
<!-- j5rupoqliu7kasm6xndl7ui32wgawkru->i4avrindvhcamhurzbfdaggbj2zgsrrh -->
|
||||
<g id="edge15" class="edge">
|
||||
<title>j5rupoqliu7kasm6xndl7ui32wgawkru->i4avrindvhcamhurzbfdaggbj2zgsrrh</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1346.0696,-158.8072C1346.0696,-139.7263 1346.0696,-117.1257 1346.0696,-97.0046"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1349.5697,-96.7403 1346.0696,-86.7403 1342.5697,-96.7404 1349.5697,-96.7403"/>
|
||||
<text text-anchor="middle" x="1292.7436" y="-127.0482" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=pkgconfig</text>
|
||||
</g>
|
||||
<!-- imopnxjmv7cwzyiecdw2saq42qvpnauh->ern66gyp6qmhmpod4jaynxx4weoberfm -->
|
||||
<g id="edge19" class="edge">
|
||||
<title>imopnxjmv7cwzyiecdw2saq42qvpnauh->ern66gyp6qmhmpod4jaynxx4weoberfm</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2788.0102,-1270.7555C2780.8234,-1251.412 2772.2926,-1228.4513 2764.7402,-1208.1236"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M2789.885,-1270.0589C2782.6982,-1250.7155 2774.1674,-1227.7547 2766.615,-1207.4271"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2768.9358,-1206.4953 2762.1721,-1198.3403 2762.3741,-1208.9332 2768.9358,-1206.4953"/>
|
||||
</g>
|
||||
<!-- imopnxjmv7cwzyiecdw2saq42qvpnauh->2w3nq3n3hcj2tqlvcpewsryamltlu5tw -->
|
||||
<g id="edge12" class="edge">
|
||||
<title>imopnxjmv7cwzyiecdw2saq42qvpnauh->2w3nq3n3hcj2tqlvcpewsryamltlu5tw</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2907.2846,-1269.5018C2936.475,-1251.8137 2964.9158,-1228.1116 2981.1904,-1197.9236 2999.477,-1164.2363 3005.2125,-1141.4693 2981.289,-1112.225 2954.5472,-1078.5579 2876.5297,-1053.8974 2789.2983,-1036.3535"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M2908.3216,-1271.2119C2937.7554,-1253.3501 2966.1962,-1229.648 2982.9488,-1198.8764 3001.4164,-1164.7249 3007.1519,-1141.9579 2982.8502,-1110.975 2955.15,-1076.6509 2877.1325,-1051.9904 2789.6927,-1034.3928"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2790.125,-1031.93 2779.6364,-1033.4269 2788.7692,-1038.7974 2790.125,-1031.93"/>
|
||||
<text text-anchor="middle" x="2836.0561" y="-1059.5023" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=mpi</text>
|
||||
</g>
|
||||
<!-- imopnxjmv7cwzyiecdw2saq42qvpnauh->gguve5icmo5e4cw5o3hvvfsxremc46if -->
|
||||
<g id="edge49" class="edge">
|
||||
<title>imopnxjmv7cwzyiecdw2saq42qvpnauh->gguve5icmo5e4cw5o3hvvfsxremc46if</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2883.731,-1270.4691C2909.4451,-1251.9243 2934.9956,-1227.7144 2949.0696,-1198.4 2965.7663,-1163.6227 2975.3506,-1139.841 2949.0696,-1111.6 2925.7161,-1086.5049 1993.0368,-1031.9055 1561.3071,-1007.9103"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1561.3813,-1004.4092 1551.2026,-1007.3492 1560.9931,-1011.3984 1561.3813,-1004.4092"/>
|
||||
</g>
|
||||
<!-- ern66gyp6qmhmpod4jaynxx4weoberfm->gguve5icmo5e4cw5o3hvvfsxremc46if -->
|
||||
<g id="edge50" class="edge">
|
||||
<title>ern66gyp6qmhmpod4jaynxx4weoberfm->gguve5icmo5e4cw5o3hvvfsxremc46if</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2551.6031,-1113.7387C2547.0531,-1112.9948 2542.537,-1112.2802 2538.0696,-1111.6 2198.5338,-1059.8997 1800.8632,-1026.8711 1561.4583,-1009.9443"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1561.4619,-1006.436 1551.2407,-1009.2249 1560.9702,-1013.4187 1561.4619,-1006.436"/>
|
||||
</g>
|
||||
<!-- nqiyrxlid6tikfpvoqdpvsjt5drs2obf->hkcrbrtf2qex6rvzuok5tzdrbam55pdn -->
|
||||
<g id="edge34" class="edge">
|
||||
<title>nqiyrxlid6tikfpvoqdpvsjt5drs2obf->hkcrbrtf2qex6rvzuok5tzdrbam55pdn</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1865.2226,-1269.4691C1922.6966,-1248.2438 1991.964,-1222.6632 2050.6644,-1200.985"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1865.9154,-1271.3453C1923.3894,-1250.12 1992.6569,-1224.5394 2051.3572,-1202.8612"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2052.5441,-1205.088 2060.7123,-1198.3403 2050.119,-1198.5215 2052.5441,-1205.088"/>
|
||||
<text text-anchor="middle" x="1910.9073" y="-1238.6056" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=scalapack</text>
|
||||
</g>
|
||||
<!-- nqiyrxlid6tikfpvoqdpvsjt5drs2obf->o524gebsxavobkte3k5fglgwnedfkadf -->
|
||||
<g id="edge52" class="edge">
|
||||
<title>nqiyrxlid6tikfpvoqdpvsjt5drs2obf->o524gebsxavobkte3k5fglgwnedfkadf</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1519.9696,-1290.6844C1394.6018,-1273.3057 1237.6631,-1244.7294 1102.7507,-1199.3478 1021.8138,-1171.8729 1008.1992,-1149.8608 932.6248,-1112.4956 887.1715,-1089.9216 836.578,-1065.4054 793.6914,-1044.8018"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1520.2442,-1288.7034C1394.9601,-1271.3381 1238.0214,-1242.7618 1103.3885,-1197.4522 1023.5148,-1170.8208 1009.9002,-1148.8087 933.5144,-1110.7044 888.0436,-1088.1218 837.4502,-1063.6056 794.5574,-1042.999"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="795.6235,-1040.7377 785.0938,-1039.565 792.5939,-1047.0482 795.6235,-1040.7377"/>
|
||||
<text text-anchor="middle" x="1046.8307" y="-1202.5988" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=blas,lapack</text>
|
||||
</g>
|
||||
<!-- lfh3aovn65e66cs24qiehq3nd2ddojef -->
|
||||
<g id="node21" class="node">
|
||||
<title>lfh3aovn65e66cs24qiehq3nd2ddojef</title>
|
||||
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M1547.9922,-1198.3002C1547.9922,-1198.3002 1144.147,-1198.3002 1144.147,-1198.3002 1138.147,-1198.3002 1132.147,-1192.3002 1132.147,-1186.3002 1132.147,-1186.3002 1132.147,-1123.6998 1132.147,-1123.6998 1132.147,-1117.6998 1138.147,-1111.6998 1144.147,-1111.6998 1144.147,-1111.6998 1547.9922,-1111.6998 1547.9922,-1111.6998 1553.9922,-1111.6998 1559.9922,-1117.6998 1559.9922,-1123.6998 1559.9922,-1123.6998 1559.9922,-1186.3002 1559.9922,-1186.3002 1559.9922,-1192.3002 1553.9922,-1198.3002 1547.9922,-1198.3002"/>
|
||||
<text text-anchor="middle" x="1346.0696" y="-1147.8" font-family="Monaco" font-size="24.00" fill="#000000">arpack-ng@3.8.0%gcc@9.4.0/lfh3aov</text>
|
||||
</g>
|
||||
<!-- nqiyrxlid6tikfpvoqdpvsjt5drs2obf->lfh3aovn65e66cs24qiehq3nd2ddojef -->
|
||||
<g id="edge46" class="edge">
|
||||
<title>nqiyrxlid6tikfpvoqdpvsjt5drs2obf->lfh3aovn65e66cs24qiehq3nd2ddojef</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1637.8539,-1271.3373C1584.2332,-1250.1557 1519.6324,-1224.6368 1464.827,-1202.9873"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1638.5887,-1269.4771C1584.968,-1248.2956 1520.3672,-1222.7767 1465.5618,-1201.1272"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1466.3716,-1198.7592 1455.785,-1198.3403 1463.7998,-1205.2696 1466.3716,-1198.7592"/>
|
||||
</g>
|
||||
<!-- 57joith2sqq6sehge54vlloyolm36mdu -->
|
||||
<g id="node22" class="node">
|
||||
<title>57joith2sqq6sehge54vlloyolm36mdu</title>
|
||||
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M1906.2352,-1198.3002C1906.2352,-1198.3002 1589.904,-1198.3002 1589.904,-1198.3002 1583.904,-1198.3002 1577.904,-1192.3002 1577.904,-1186.3002 1577.904,-1186.3002 1577.904,-1123.6998 1577.904,-1123.6998 1577.904,-1117.6998 1583.904,-1111.6998 1589.904,-1111.6998 1589.904,-1111.6998 1906.2352,-1111.6998 1906.2352,-1111.6998 1912.2352,-1111.6998 1918.2352,-1117.6998 1918.2352,-1123.6998 1918.2352,-1123.6998 1918.2352,-1186.3002 1918.2352,-1186.3002 1918.2352,-1192.3002 1912.2352,-1198.3002 1906.2352,-1198.3002"/>
|
||||
<text text-anchor="middle" x="1748.0696" y="-1147.8" font-family="Monaco" font-size="24.00" fill="#000000">sed@4.8%gcc@9.4.0/57joith</text>
|
||||
</g>
|
||||
<!-- nqiyrxlid6tikfpvoqdpvsjt5drs2obf->57joith2sqq6sehge54vlloyolm36mdu -->
|
||||
<g id="edge27" class="edge">
|
||||
<title>nqiyrxlid6tikfpvoqdpvsjt5drs2obf->57joith2sqq6sehge54vlloyolm36mdu</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1748.0696,-1270.4072C1748.0696,-1251.3263 1748.0696,-1228.7257 1748.0696,-1208.6046"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1751.5697,-1208.3403 1748.0696,-1198.3403 1744.5697,-1208.3404 1751.5697,-1208.3403"/>
|
||||
</g>
|
||||
<!-- nqiyrxlid6tikfpvoqdpvsjt5drs2obf->2w3nq3n3hcj2tqlvcpewsryamltlu5tw -->
|
||||
<g id="edge24" class="edge">
|
||||
<title>nqiyrxlid6tikfpvoqdpvsjt5drs2obf->2w3nq3n3hcj2tqlvcpewsryamltlu5tw</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1975.9734,-1301.684C2148.2819,-1288.3961 2365.6859,-1259.5384 2428.3689,-1197.6866 2466.9261,-1160.1438 2472.9783,-1095.7153 2471.5152,-1049.9701"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1976.1272,-1303.678C2148.5451,-1290.3788 2365.949,-1261.521 2429.7703,-1199.1134 2468.9173,-1160.3309 2474.9695,-1095.9024 2473.5142,-1049.9065"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2476.0078,-1049.7027 2472.0657,-1039.8686 2469.0147,-1050.0146 2476.0078,-1049.7027"/>
|
||||
<text text-anchor="middle" x="2207.8884" y="-1273.0053" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=mpi</text>
|
||||
</g>
|
||||
<!-- nqiyrxlid6tikfpvoqdpvsjt5drs2obf->gguve5icmo5e4cw5o3hvvfsxremc46if -->
|
||||
<g id="edge6" class="edge">
|
||||
<title>nqiyrxlid6tikfpvoqdpvsjt5drs2obf->gguve5icmo5e4cw5o3hvvfsxremc46if</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1520.1614,-1301.6771C1362.9712,-1287.992 1173.582,-1259.0928 1123.0696,-1198.4 1098.3914,-1168.7481 1103.0165,-1144.5563 1123.0696,-1111.6 1140.5998,-1082.79 1167.9002,-1060.8539 1197.4647,-1044.2681"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1199.1408,-1047.3408 1206.2789,-1039.5114 1195.8163,-1041.1806 1199.1408,-1047.3408"/>
|
||||
</g>
|
||||
<!-- ogcucq2eod3xusvvied5ol2iobui4nsb -->
|
||||
<g id="node18" class="node">
|
||||
<title>ogcucq2eod3xusvvied5ol2iobui4nsb</title>
|
||||
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M400.2088,-245.5002C400.2088,-245.5002 11.9304,-245.5002 11.9304,-245.5002 5.9304,-245.5002 -.0696,-239.5002 -.0696,-233.5002 -.0696,-233.5002 -.0696,-170.8998 -.0696,-170.8998 -.0696,-164.8998 5.9304,-158.8998 11.9304,-158.8998 11.9304,-158.8998 400.2088,-158.8998 400.2088,-158.8998 406.2088,-158.8998 412.2088,-164.8998 412.2088,-170.8998 412.2088,-170.8998 412.2088,-233.5002 412.2088,-233.5002 412.2088,-239.5002 406.2088,-245.5002 400.2088,-245.5002"/>
|
||||
<text text-anchor="middle" x="206.0696" y="-195" font-family="Monaco" font-size="24.00" fill="#000000">libiconv@1.17%gcc@9.4.0/ogcucq2</text>
|
||||
</g>
|
||||
<!-- xm3ldz3y3msfdc3hzshvxpbpg5hnt6o6->ogcucq2eod3xusvvied5ol2iobui4nsb -->
|
||||
<g id="edge47" class="edge">
|
||||
<title>xm3ldz3y3msfdc3hzshvxpbpg5hnt6o6->ogcucq2eod3xusvvied5ol2iobui4nsb</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M205.0696,-317.6072C205.0696,-298.5263 205.0696,-275.9257 205.0696,-255.8046"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M207.0696,-317.6072C207.0696,-298.5263 207.0696,-275.9257 207.0696,-255.8046"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="209.5697,-255.5403 206.0696,-245.5403 202.5697,-255.5404 209.5697,-255.5403"/>
|
||||
<text text-anchor="middle" x="165.5739" y="-285.8482" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=iconv</text>
|
||||
</g>
|
||||
<!-- 4bu62kyfuh4ikdkuyxfxjxanf7e7qopu->mujlx42xgttdc6u6rmiftsktpsrcmpbs -->
|
||||
<g id="edge42" class="edge">
|
||||
<title>4bu62kyfuh4ikdkuyxfxjxanf7e7qopu->mujlx42xgttdc6u6rmiftsktpsrcmpbs</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M672.6614,-1430.2151C600.7916,-1411.3548 534.1254,-1386.9583 512.2667,-1357.7962 489.0909,-1326.029 493.54,-1304.0273 512.1928,-1269.9192 527.5256,-1242.0821 552.3382,-1220.1508 578.9347,-1203.0434"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M673.169,-1428.2806C601.4789,-1409.4766 534.8127,-1385.0802 513.8725,-1356.6038 491.0512,-1326.4254 495.5003,-1304.4237 513.9464,-1270.8808 528.8502,-1243.5806 553.6627,-1221.6493 580.016,-1204.7259"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="581.46,-1206.7724 588.1193,-1198.532 577.7747,-1200.8211 581.46,-1206.7724"/>
|
||||
</g>
|
||||
<!-- 4bu62kyfuh4ikdkuyxfxjxanf7e7qopu->o524gebsxavobkte3k5fglgwnedfkadf -->
|
||||
<g id="edge43" class="edge">
|
||||
<title>4bu62kyfuh4ikdkuyxfxjxanf7e7qopu->o524gebsxavobkte3k5fglgwnedfkadf</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M680.4783,-1430.2246C600.8632,-1410.3933 522.8724,-1385.2921 493.3877,-1357.9314 411.1392,-1281.1573 374.1678,-1206.1582 435.2305,-1111.0561 454.3431,-1081.6726 482.5021,-1059.8261 513.5088,-1043.3725"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M680.9617,-1428.2839C601.476,-1408.4895 523.4851,-1383.3883 494.7515,-1356.4686 412.9331,-1280.273 375.9616,-1205.2739 436.9087,-1112.1439 455.569,-1083.2528 483.728,-1061.4063 514.4455,-1045.1396"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="515.8631,-1047.2236 523.1893,-1039.5699 512.6893,-1040.9844 515.8631,-1047.2236"/>
|
||||
<text text-anchor="middle" x="453.0969" y="-1356.92" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=blas</text>
|
||||
</g>
|
||||
<!-- 4bu62kyfuh4ikdkuyxfxjxanf7e7qopu->xiro2z6na56qdd4czjhj54eag3ekbiow -->
|
||||
<g id="edge38" class="edge">
|
||||
<title>4bu62kyfuh4ikdkuyxfxjxanf7e7qopu->xiro2z6na56qdd4czjhj54eag3ekbiow</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M857.6892,-1429.8521C840.9235,-1409.9835 820.9375,-1386.2985 803.4466,-1365.5705"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M859.2178,-1428.5623C842.4521,-1408.6937 822.466,-1385.0087 804.9751,-1364.2807"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="806.7654,-1362.5258 797.6414,-1357.1403 801.4156,-1367.0402 806.7654,-1362.5258"/>
|
||||
</g>
|
||||
<!-- 4bu62kyfuh4ikdkuyxfxjxanf7e7qopu->2w3nq3n3hcj2tqlvcpewsryamltlu5tw -->
|
||||
<g id="edge13" class="edge">
|
||||
<title>4bu62kyfuh4ikdkuyxfxjxanf7e7qopu->2w3nq3n3hcj2tqlvcpewsryamltlu5tw</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1118.1783,-1450.5735C1412.4221,-1422.447 1902.6188,-1374.0528 1984.8578,-1356.2227 2203.916,-1308.9943 2329.6342,-1377.1305 2461.2658,-1197.8052 2492.3675,-1156.1664 2488.743,-1094.1171 2480.3694,-1050.0521"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1118.3686,-1452.5644C1412.6186,-1424.4374 1902.8153,-1376.0432 1985.2814,-1358.1773 2202.963,-1310.7526 2328.6812,-1378.8889 2462.8734,-1198.9948 2494.3641,-1156.0498 2490.7395,-1094.0005 2482.3343,-1049.6791"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2484.7438,-1048.9818 2479.3189,-1039.8812 2477.8845,-1050.3784 2484.7438,-1048.9818"/>
|
||||
<text text-anchor="middle" x="1820.4407" y="-1379.7188" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=mpi</text>
|
||||
</g>
|
||||
<!-- 4bu62kyfuh4ikdkuyxfxjxanf7e7qopu->gguve5icmo5e4cw5o3hvvfsxremc46if -->
|
||||
<g id="edge32" class="edge">
|
||||
<title>4bu62kyfuh4ikdkuyxfxjxanf7e7qopu->gguve5icmo5e4cw5o3hvvfsxremc46if</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M947.2173,-1428.5496C968.7089,-1408.5917 992.2747,-1383.3345 1008.2117,-1356.6861 1067.0588,-1259.8646 1008.3745,-1197.6371 1084.3226,-1110.9351 1110.3076,-1081.7965 1144.7149,-1059.7578 1180.1804,-1043.0531"/>
|
||||
<path fill="none" stroke="#daa520" stroke-width="2" d="M948.5783,-1430.0151C970.1712,-1409.9561 993.737,-1384.6989 1009.9275,-1357.7139 1068.5139,-1258.4924 1009.8295,-1196.2649 1085.8166,-1112.2649 1111.3864,-1083.4807 1145.7936,-1061.442 1181.0322,-1044.8626"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1182.4567,-1046.9607 1190.1008,-1039.6246 1179.5503,-1040.5926 1182.4567,-1046.9607"/>
|
||||
</g>
|
||||
<!-- 5xerf6imlgo4xlubacr4mljacc3edexo -->
|
||||
<g id="node17" class="node">
|
||||
<title>5xerf6imlgo4xlubacr4mljacc3edexo</title>
|
||||
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M1822.3657,-880.7002C1822.3657,-880.7002 1437.7735,-880.7002 1437.7735,-880.7002 1431.7735,-880.7002 1425.7735,-874.7002 1425.7735,-868.7002 1425.7735,-868.7002 1425.7735,-806.0998 1425.7735,-806.0998 1425.7735,-800.0998 1431.7735,-794.0998 1437.7735,-794.0998 1437.7735,-794.0998 1822.3657,-794.0998 1822.3657,-794.0998 1828.3657,-794.0998 1834.3657,-800.0998 1834.3657,-806.0998 1834.3657,-806.0998 1834.3657,-868.7002 1834.3657,-868.7002 1834.3657,-874.7002 1828.3657,-880.7002 1822.3657,-880.7002"/>
|
||||
<text text-anchor="middle" x="1630.0696" y="-830.2" font-family="Monaco" font-size="24.00" fill="#000000">openssl@1.1.1s%gcc@9.4.0/5xerf6i</text>
|
||||
</g>
|
||||
<!-- 5xerf6imlgo4xlubacr4mljacc3edexo->ywrpvv2hgooeepdke33exkqrtdpd5gkl -->
|
||||
<g id="edge22" class="edge">
|
||||
<title>5xerf6imlgo4xlubacr4mljacc3edexo->ywrpvv2hgooeepdke33exkqrtdpd5gkl</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1425.7129,-803.7711C1262.7545,-776.9548 1035.5151,-739.5603 871.9084,-712.6373"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="872.1525,-709.1305 861.7169,-710.9602 871.0158,-716.0376 872.1525,-709.1305"/>
|
||||
</g>
|
||||
<!-- 5xerf6imlgo4xlubacr4mljacc3edexo->4vsmjofkhntilgzh4zebluqak5mdsu3x -->
|
||||
<g id="edge48" class="edge">
|
||||
<title>5xerf6imlgo4xlubacr4mljacc3edexo->4vsmjofkhntilgzh4zebluqak5mdsu3x</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1644.2788,-794.0072C1650.5843,-774.7513 1658.0636,-751.9107 1664.6976,-731.6514"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1668.0917,-732.533 1667.8776,-721.9403 1661.4393,-730.3546 1668.0917,-732.533"/>
|
||||
</g>
|
||||
<!-- 5xerf6imlgo4xlubacr4mljacc3edexo->nizxi5u5bbrzhzwfy2qb7hatlhuswlrz -->
|
||||
<g id="edge41" class="edge">
|
||||
<title>5xerf6imlgo4xlubacr4mljacc3edexo->nizxi5u5bbrzhzwfy2qb7hatlhuswlrz</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1834.3289,-793.5645C1906.6817,-774.1673 1975.9199,-749.2273 1998.2925,-721.3707 2031.5218,-680.681 2032.1636,-617.9031 2027.044,-573.3921"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1834.8468,-795.4962C1907.3595,-776.0489 1976.5977,-751.1089 1999.8467,-722.6293 2033.5217,-680.7015 2034.1635,-617.9235 2029.0309,-573.1639"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2031.4885,-572.6712 2026.7474,-563.1964 2024.5451,-573.5598 2031.4885,-572.6712"/>
|
||||
</g>
|
||||
<!-- v32wejd4d5lc6uka4qlrogwh5xae2h3r -->
|
||||
<g id="node26" class="node">
|
||||
<title>v32wejd4d5lc6uka4qlrogwh5xae2h3r</title>
|
||||
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M1306.1776,-404.3002C1306.1776,-404.3002 929.9616,-404.3002 929.9616,-404.3002 923.9616,-404.3002 917.9616,-398.3002 917.9616,-392.3002 917.9616,-392.3002 917.9616,-329.6998 917.9616,-329.6998 917.9616,-323.6998 923.9616,-317.6998 929.9616,-317.6998 929.9616,-317.6998 1306.1776,-317.6998 1306.1776,-317.6998 1312.1776,-317.6998 1318.1776,-323.6998 1318.1776,-329.6998 1318.1776,-329.6998 1318.1776,-392.3002 1318.1776,-392.3002 1318.1776,-398.3002 1312.1776,-404.3002 1306.1776,-404.3002"/>
|
||||
<text text-anchor="middle" x="1118.0696" y="-353.8" font-family="Monaco" font-size="24.00" fill="#000000">readline@8.2%gcc@9.4.0/v32wejd</text>
|
||||
</g>
|
||||
<!-- uabgssx6lsgrevwbttslldnr5nzguprj->v32wejd4d5lc6uka4qlrogwh5xae2h3r -->
|
||||
<g id="edge7" class="edge">
|
||||
<title>uabgssx6lsgrevwbttslldnr5nzguprj->v32wejd4d5lc6uka4qlrogwh5xae2h3r</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1117.0696,-476.4072C1117.0696,-457.3263 1117.0696,-434.7257 1117.0696,-414.6046"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1119.0696,-476.4072C1119.0696,-457.3263 1119.0696,-434.7257 1119.0696,-414.6046"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1121.5697,-414.3403 1118.0696,-404.3403 1114.5697,-414.3404 1121.5697,-414.3403"/>
|
||||
</g>
|
||||
<!-- lfh3aovn65e66cs24qiehq3nd2ddojef->o524gebsxavobkte3k5fglgwnedfkadf -->
|
||||
<g id="edge14" class="edge">
|
||||
<title>lfh3aovn65e66cs24qiehq3nd2ddojef->o524gebsxavobkte3k5fglgwnedfkadf</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1167.6711,-1112.5788C1078.9073,-1090.9596 971.5916,-1064.822 881.5513,-1042.892"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1168.1444,-1110.6356C1079.3806,-1089.0165 972.0649,-1062.8788 882.0246,-1040.9488"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="882.5603,-1038.5062 872.016,-1039.5403 880.9038,-1045.3074 882.5603,-1038.5062"/>
|
||||
<text text-anchor="middle" x="963.904" y="-1079.817" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=blas,lapack</text>
|
||||
</g>
|
||||
<!-- lfh3aovn65e66cs24qiehq3nd2ddojef->2w3nq3n3hcj2tqlvcpewsryamltlu5tw -->
|
||||
<g id="edge31" class="edge">
|
||||
<title>lfh3aovn65e66cs24qiehq3nd2ddojef->2w3nq3n3hcj2tqlvcpewsryamltlu5tw</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1559.7922,-1112.1043C1562.8511,-1111.5975 1565.8904,-1111.1002 1568.9103,-1110.6128 1759.2182,-1079.8992 1973.2397,-1052.1328 2144.6143,-1031.5343"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1560.1191,-1114.0774C1563.1741,-1113.5712 1566.2134,-1113.0739 1569.2289,-1112.5872 1759.4755,-1081.8826 1973.497,-1054.1161 2144.8529,-1033.52"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2145.1529,-1036.002 2154.6648,-1031.3357 2144.3191,-1029.0518 2145.1529,-1036.002"/>
|
||||
<text text-anchor="middle" x="1828.178" y="-1072.4692" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=mpi</text>
|
||||
</g>
|
||||
<!-- lfh3aovn65e66cs24qiehq3nd2ddojef->gguve5icmo5e4cw5o3hvvfsxremc46if -->
|
||||
<g id="edge21" class="edge">
|
||||
<title>lfh3aovn65e66cs24qiehq3nd2ddojef->gguve5icmo5e4cw5o3hvvfsxremc46if</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1346.0696,-1111.6072C1346.0696,-1092.5263 1346.0696,-1069.9257 1346.0696,-1049.8046"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1349.5697,-1049.5403 1346.0696,-1039.5403 1342.5697,-1049.5404 1349.5697,-1049.5403"/>
|
||||
</g>
|
||||
<!-- 2w3nq3n3hcj2tqlvcpewsryamltlu5tw->htzjns66gmq6pjofohp26djmjnpbegho -->
|
||||
<g id="edge30" class="edge">
|
||||
<title>2w3nq3n3hcj2tqlvcpewsryamltlu5tw->htzjns66gmq6pjofohp26djmjnpbegho</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2467.0696,-952.8072C2467.0696,-933.7263 2467.0696,-911.1257 2467.0696,-891.0046"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2470.5697,-890.7403 2467.0696,-880.7403 2463.5697,-890.7404 2470.5697,-890.7403"/>
|
||||
</g>
|
||||
<!-- 7rzbmgoxhmm2jhellkgcjmn62uklf22x->gguve5icmo5e4cw5o3hvvfsxremc46if -->
|
||||
<g id="edge2" class="edge">
|
||||
<title>7rzbmgoxhmm2jhellkgcjmn62uklf22x->gguve5icmo5e4cw5o3hvvfsxremc46if</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1422.351,-1429.2133C1312.2528,-1388.8872 1171.1589,-1316.8265 1103.0696,-1198.4 1083.8409,-1164.956 1082.4563,-1144.2088 1103.0696,-1111.6 1121.4102,-1082.5864 1149.2483,-1060.7204 1179.6189,-1044.2895"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1181.4205,-1047.2977 1188.6801,-1039.5809 1178.1927,-1041.0863 1181.4205,-1047.2977"/>
|
||||
</g>
|
||||
<!-- v32wejd4d5lc6uka4qlrogwh5xae2h3r->j5rupoqliu7kasm6xndl7ui32wgawkru -->
|
||||
<g id="edge39" class="edge">
|
||||
<title>v32wejd4d5lc6uka4qlrogwh5xae2h3r->j5rupoqliu7kasm6xndl7ui32wgawkru</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1179.8001,-316.7866C1209.2065,-296.3053 1244.4355,-271.7686 1274.8343,-250.5961"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1180.9431,-318.4278C1210.3495,-297.9465 1245.5785,-273.4098 1275.9774,-252.2373"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1277.6375,-254.1277 1283.8429,-245.5403 1273.6367,-248.3836 1277.6375,-254.1277"/>
|
||||
</g>
|
||||
<!-- gguve5icmo5e4cw5o3hvvfsxremc46if->j5rupoqliu7kasm6xndl7ui32wgawkru -->
|
||||
<g id="edge18" class="edge">
|
||||
<title>gguve5icmo5e4cw5o3hvvfsxremc46if->j5rupoqliu7kasm6xndl7ui32wgawkru</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1345.0696,-952.7909C1345.0696,-891.6316 1345.0696,-776.6094 1345.0696,-678.6 1345.0696,-678.6 1345.0696,-678.6 1345.0696,-519.8 1345.0696,-426.9591 1345.0696,-318.8523 1345.0696,-255.7237"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1347.0696,-952.7909C1347.0696,-891.6316 1347.0696,-776.6094 1347.0696,-678.6 1347.0696,-678.6 1347.0696,-678.6 1347.0696,-519.8 1347.0696,-426.9591 1347.0696,-318.8523 1347.0696,-255.7237"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1349.5697,-255.6091 1346.0696,-245.6091 1342.5697,-255.6092 1349.5697,-255.6091"/>
|
||||
</g>
|
||||
<!-- gguve5icmo5e4cw5o3hvvfsxremc46if->5xerf6imlgo4xlubacr4mljacc3edexo -->
|
||||
<g id="edge40" class="edge">
|
||||
<title>gguve5icmo5e4cw5o3hvvfsxremc46if->5xerf6imlgo4xlubacr4mljacc3edexo</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1423.1858,-951.9344C1460.2844,-931.1905 1504.8229,-906.2866 1543.0151,-884.9312"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1424.1619,-953.68C1461.2605,-932.9361 1505.799,-908.0322 1543.9912,-886.6769"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1545.5391,-888.6757 1552.5592,-880.7403 1542.1228,-882.5659 1545.5391,-888.6757"/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 58 KiB |
@@ -54,9 +54,16 @@ or refer to the full manual below.
   features
   getting_started
   basic_usage
-   Tutorial: Spack 101 <https://spack-tutorial.readthedocs.io>
   replace_conda_homebrew

+.. toctree::
+   :maxdepth: 2
+   :caption: Links
+
+   Tutorial (spack-tutorial.rtfd.io) <https://spack-tutorial.readthedocs.io>
+   Packages (packages.spack.io) <https://packages.spack.io>
+   Binaries (binaries.spack.io) <https://cache.spack.io>
+
.. toctree::
   :maxdepth: 2
   :caption: Reference
@@ -72,7 +79,6 @@ or refer to the full manual below.
   repositories
   binary_caches
   command_index
-   package_list
   chain
   extensions
   pipelines
@@ -1,17 +0,0 @@
-.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
-   Spack Project Developers. See the top-level COPYRIGHT file for details.
-
-   SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-.. _package-list:
-
-============
-Package List
-============
-
-This is a list of things you can install using Spack. It is
-automatically generated based on the packages in this Spack
-version.
-
-.. raw:: html
-   :file: package_list.html
@@ -1549,7 +1549,7 @@ its value:

   def configure_args(self):
       ...
-      if "+shared" in self.spec:
+      if self.spec.satisfies("+shared"):
           extra_args.append("--enable-shared")
       else:
           extra_args.append("--disable-shared")
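For orientation, here is a minimal sketch of how the new ``satisfies`` idiom sits in a complete recipe; the package name, URL, and variant below are hypothetical:

.. code-block:: python

   from spack.package import *


   class Mylib(AutotoolsPackage):
       """Hypothetical package illustrating spec.satisfies() on a variant."""

       homepage = "https://example.com/mylib"
       url = "https://example.com/mylib-1.0.tar.gz"

       variant("shared", default=True, description="Build shared libraries")

       def configure_args(self):
           extra_args = []
           # satisfies() replaces the older `"+shared" in self.spec` test.
           if self.spec.satisfies("+shared"):
               extra_args.append("--enable-shared")
           else:
               extra_args.append("--disable-shared")
           return extra_args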
@@ -1636,7 +1636,7 @@ Within a package recipe a multi-valued variant is tested using a ``key=value`` s

.. code-block:: python

-   if "languages=jit" in spec:
+   if spec.satisfies("languages=jit"):
       options.append("--enable-host-shared")

"""""""""""""""""""""""""""""""""""""""""""
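For context, a hedged sketch of how such a multi-valued variant might be declared in the same recipe (names and defaults are hypothetical); ``spec.satisfies("languages=jit")`` is then true whenever ``jit`` is among the selected values:

.. code-block:: python

   variant(
       "languages",
       default="c,c++",
       values=("c", "c++", "jit"),
       multi=True,
       description="Languages supported by the package",
   )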
@@ -2352,7 +2352,7 @@ the following at the command line of a bash shell:

.. code-block:: console

-   $ for i in {1..12}; do nohup spack install -j 4 mpich@3.3.2 >> mpich_install.txt 2>&1 &; done
+   $ for i in {1..12}; do nohup spack install -j 4 mpich@3.3.2 >> mpich_install.txt 2>&1 & done

.. note::
@@ -2557,9 +2557,10 @@ Conditional dependencies
^^^^^^^^^^^^^^^^^^^^^^^^

You may have a package that only requires a dependency under certain
-conditions. For example, you may have a package that has optional MPI support,
-MPI is only a dependency when you want to enable MPI support for the
-package. In that case, you could say something like:
+conditions. For example, you may have a package with optional MPI support.
+You would then provide a variant to reflect that the feature is optional
+and specify that the MPI dependency only applies when MPI support is enabled.
+In that case, you could say something like:

.. code-block:: python
@@ -2567,13 +2568,39 @@ package. In that case, you could say something like:

   depends_on("mpi", when="+mpi")

-``when`` can include constraints on the variant, version, compiler, etc. and
-the :mod:`syntax<spack.spec>` is the same as for Specs written on the command
-line.
-
-If a dependency/feature of a package isn't typically used, you can save time
-by making it conditional (since Spack will not build the dependency unless it
-is required for the Spec).
+Suppose that, starting with version 3, the above package also has optional
+``Trilinos`` support, and you want both to build either with or without MPI.
+Further suppose you require a version of ``Trilinos`` no older than 12.6. In
+that case, the ``trilinos`` variant and dependency directives would be:

+.. code-block:: python
+
+   variant("trilinos", default=False, description="Enable Trilinos support")
+
+   depends_on("trilinos@12.6:", when="@3: +trilinos")
+   depends_on("trilinos@12.6: +mpi", when="@3: +trilinos +mpi")


+Alternatively, you could use the ``when`` context manager to equivalently specify
+the ``trilinos`` variant dependencies as follows:

+.. code-block:: python
+
+   with when("@3: +trilinos"):
+       depends_on("trilinos@12.6:")
+       depends_on("trilinos +mpi", when="+mpi")


+The argument to ``when`` in either case can include any Spec constraints that
+are supported on the command line using the same :ref:`syntax <sec-specs>`.

+.. note::
+
+   If a dependency isn't typically used, you can save time by making it
+   conditional since Spack will not build the dependency unless it is
+   required for the Spec.


.. _dependency_dependency_patching:
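As a further hedged illustration (the package name and bounds are hypothetical), a single ``when`` argument can mix version, compiler, and variant constraints:

.. code-block:: python

   # Needed only for old versions, GCC builds, and the +cuda configuration.
   depends_on("libfoo@2:", when="@:1.5 %gcc +cuda")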
@@ -2661,60 +2688,6 @@ appear in the package file (or in this case, in the list).
right version. If two packages depend on ``binutils`` patched *the
same* way, they can both use a single installation of ``binutils``.

-.. _setup-dependent-environment:
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Influence how dependents are built or run
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Spack provides a mechanism for dependencies to influence the
-environment of their dependents by overriding the
-:meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
-or the
-:meth:`setup_dependent_build_environment <spack.builder.Builder.setup_dependent_build_environment>`
-methods.
-The Qt package, for instance, uses this call:
-
-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py
-   :pyobject: Qt.setup_dependent_build_environment
-   :linenos:
-
-to set the ``QTDIR`` environment variable so that packages
-that depend on a particular Qt installation will find it.
-Another good example of how a dependency can influence
-the build environment of dependents is the Python package:
-
-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
-   :pyobject: Python.setup_dependent_build_environment
-   :linenos:
-
-In the method above it is ensured that any package that depends on Python
-will have the ``PYTHONPATH``, ``PYTHONHOME`` and ``PATH`` environment
-variables set appropriately before starting the installation. To make things
-even simpler the ``python setup.py`` command is also inserted into the module
-scope of dependents by overriding a third method called
-:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`:
-
-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
-   :pyobject: Python.setup_dependent_package
-   :linenos:
-
-This allows most python packages to have a very simple install procedure,
-like the following:
-
-.. code-block:: python
-
-   def install(self, spec, prefix):
-       setup_py("install", "--prefix={0}".format(prefix))
-
-Finally the Python package takes also care of the modifications to ``PYTHONPATH``
-to allow dependencies to run correctly:
-
-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
-   :pyobject: Python.setup_dependent_run_environment
-   :linenos:


.. _packaging_conflicts:
@@ -2859,6 +2832,70 @@ variant(s) are selected. This may be accomplished with conditional
   extends("python", when="+python")
   ...

+.. _setup-environment:
+
+--------------------------------------------
+Runtime and build time environment variables
+--------------------------------------------
+
+Spack provides a few methods to help package authors set up the required environment variables for
+their package. Environment variables typically depend on how the package is used: variables that
+make sense during the build phase may not be needed at runtime, and vice versa. Further, sometimes
+it makes sense to let a dependency set the environment variables for its dependents. To allow all
+this, Spack provides four different methods that can be overridden in a package:
+
+1. :meth:`setup_build_environment <spack.builder.Builder.setup_build_environment>`
+2. :meth:`setup_run_environment <spack.package_base.PackageBase.setup_run_environment>`
+3. :meth:`setup_dependent_build_environment <spack.builder.Builder.setup_dependent_build_environment>`
+4. :meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
+
+The Qt package, for instance, uses this call:
+
+.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py
+   :pyobject: Qt.setup_dependent_build_environment
+   :linenos:
+
+to set the ``QTDIR`` environment variable so that packages that depend on a particular Qt
+installation will find it.
+
+The following diagram will give you an idea of when each of these methods is called in a build
+context:
+
+.. image:: images/setup_env.png
+   :align: center
+
+Notice that ``setup_dependent_run_environment`` can be called multiple times, once for each
+dependent package, whereas ``setup_run_environment`` is called only once for the package itself.
+This means that the former should only be used if the environment variables depend on the dependent
+package, whereas the latter should be used if the environment variables depend only on the package
+itself.
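To make the distinction concrete, here is a minimal hedged sketch of a package overriding three of these hooks; the package and variable names are hypothetical:

.. code-block:: python

   from spack.package import *


   class Mytool(Package):
       """Hypothetical package illustrating the environment hooks."""

       def setup_build_environment(self, env):
           # Seen only while this package itself is being built.
           env.set("MYTOOL_BUILD_MODE", "release")

       def setup_run_environment(self, env):
           # Seen whenever this package is loaded to run, e.g. via `spack load`.
           env.prepend_path("PATH", self.prefix.bin)

       def setup_dependent_build_environment(self, env, dependent_spec):
           # Seen while dependents of this package are being built.
           env.set("MYTOOL_ROOT", self.prefix)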
+--------------------------------
+Setting package module variables
+--------------------------------
+
+Apart from modifying environment variables of the dependent package, you can also define Python
+variables to be used by the dependent. This is done by implementing
+:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`. An
+example of this can be found in the ``Python`` package:
+
+.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
+   :pyobject: Python.setup_dependent_package
+   :linenos:
+
+This allows Python packages to directly use these variables:
+
+.. code-block:: python
+
+   def install(self, spec, prefix):
+       ...
+       install("script.py", python_platlib)
+
+.. note::
+
+   We recommend using ``setup_dependent_package`` sparingly, as it is not always clear where
+   global variables are coming from when editing a ``package.py`` file.
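The mechanism itself can be sketched as follows (the attribute name is hypothetical): the dependency assigns attributes on the dependent's module object, and those attributes then appear as globals inside the dependent's ``package.py``:

.. code-block:: python

   def setup_dependent_package(self, module, dependent_spec):
       # Expose a convenience global to the dependent's package.py;
       # the dependent can then call `mytool("--some-flag")` directly.
       module.mytool = Executable(self.prefix.bin.mytool)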
-----
Views
-----
@@ -2937,6 +2974,33 @@ The ``provides("mpi")`` call tells Spack that the ``mpich`` package
can be used to satisfy the dependency of any package that
``depends_on("mpi")``.

+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Providing multiple virtuals simultaneously
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Packages can provide more than one virtual dependency. Sometimes, due to implementation details,
+there are subsets of those virtuals that need to be provided together by the same package.
+
+A well-known example is ``openblas``, which provides both the ``lapack`` and ``blas`` API in a single ``libopenblas``
+library. A package that needs ``lapack`` and ``blas`` must either use ``openblas`` to provide both, or not use
+``openblas`` at all. It cannot pick one or the other.
+
+To express this constraint in a package, the two virtual dependencies must be listed in the same ``provides`` directive:
+
+.. code-block:: python
+
+   provides("blas", "lapack")
+
+This makes it impossible to select ``openblas`` as a provider for one of the two
+virtual dependencies and not for the other. If you try to, Spack will report an error:
+
+.. code-block:: console
+
+   $ spack spec netlib-scalapack ^[virtuals=lapack] openblas ^[virtuals=blas] atlas
+   ==> Error: concretization failed for the following reasons:
+
+   1. Package 'openblas' needs to provide both 'lapack' and 'blas' together, but provides only 'lapack'
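For orientation, a hedged sketch of where such a directive sits in a recipe (the skeleton below is illustrative, not the actual ``openblas`` package):

.. code-block:: python

   class Openblas(MakefilePackage):
       """Sketch: one library satisfies both virtuals, so they are
       declared together in a single directive."""

       provides("blas", "lapack")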
^^^^^^^^^^^^^^^^^^^^
Versioned Interfaces
^^^^^^^^^^^^^^^^^^^^
@@ -3501,7 +3565,7 @@ need to override methods like ``configure_args``:

   def configure_args(self):
       args = ["--enable-cxx"] + self.enable_or_disable("libs")
-      if "libs=static" in self.spec:
+      if self.spec.satisfies("libs=static"):
           args.append("--with-pic")
       return args
@@ -3635,7 +3699,8 @@ regardless of the build system. The arguments for the phase are:
The arguments ``spec`` and ``prefix`` are passed only for convenience, as they always
correspond to ``self.spec`` and ``self.spec.prefix`` respectively.

-If the ``package.py`` encodes builders explicitly, the signature for a phase changes slightly:
+If the ``package.py`` has build instructions in a separate
+:ref:`builder class <multiple_build_systems>`, the signature for a phase changes slightly:

.. code-block:: python
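The code block that follows here in the original file falls outside the hunk; judging from the context line in the next hunk ("the package is passed as the second argument"), the changed signature looks roughly like this (a sketch, not the verbatim file content):

.. code-block:: python

   class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
       def install(self, pkg, spec, prefix):
           ...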
@@ -3645,56 +3710,6 @@ If the ``package.py`` encodes builders explicitly, the signature for a phase cha

In this case the package is passed as the second argument, and ``self`` is the builder instance.

-.. _multiple_build_systems:
-
-^^^^^^^^^^^^^^^^^^^^^^
-Multiple build systems
-^^^^^^^^^^^^^^^^^^^^^^
-
-There are cases where a software actively supports two build systems, or changes build systems
-as it evolves, or needs different build systems on different platforms. Spack allows dealing with
-these cases natively, if a recipe is written using builders explicitly.
-
-For instance, software that supports two build systems unconditionally should derive from
-both ``*Package`` base classes, and declare the possible use of multiple build systems using
-a directive:
-
-.. code-block:: python
-
-   class ArpackNg(CMakePackage, AutotoolsPackage):
-
-       build_system("cmake", "autotools", default="cmake")
-
-In this case the software can be built with both ``autotools`` and ``cmake``. Since the package
-supports multiple build systems, it is necessary to declare which one is the default. The ``package.py``
-will likely contain some overriding of default builder methods:
-
-.. code-block:: python
-
-   class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
-       def cmake_args(self):
-           pass
-
-   class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
-       def configure_args(self):
-           pass
-
-In more complex cases it might happen that the build system changes according to certain conditions,
-for instance across versions. That can be expressed with conditional variant values:
-
-.. code-block:: python
-
-   class ArpackNg(CMakePackage, AutotoolsPackage):
-
-       build_system(
-           conditional("cmake", when="@0.64:"),
-           conditional("autotools", when="@:0.63"),
-           default="cmake",
-       )
-
-In the example the directive impose a change from ``Autotools`` to ``CMake`` going
-from ``v0.63`` to ``v0.64``.
-
^^^^^^^^^^^^^^^^^^
Mixin base classes
^^^^^^^^^^^^^^^^^^
@@ -3741,6 +3756,106 @@ for instance:

In the example above ``Cp2k`` inherits all the conflicts and variants that ``CudaPackage`` defines.

+.. _multiple_build_systems:
+
+----------------------
+Multiple build systems
+----------------------
+
+There are cases where a package actively supports two build systems, or changes build systems
+as it evolves, or needs different build systems on different platforms. Spack allows dealing with
+these cases by splitting the build instructions into separate builder classes.
+
+For instance, software that supports two build systems unconditionally should derive from
+both ``*Package`` base classes, and declare the possible use of multiple build systems using
+a directive:
+
+.. code-block:: python
+
+   class Example(CMakePackage, AutotoolsPackage):
+
+       variant("my_feature", default=True)
+
+       build_system("cmake", "autotools", default="cmake")
+
+In this case the software can be built with both ``autotools`` and ``cmake``. Since the package
+supports multiple build systems, it is necessary to declare which one is the default.
+
+Additional build instructions are split into separate builder classes:
+
+.. code-block:: python
+
+   class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
+       def cmake_args(self):
+           return [
+               self.define_from_variant("MY_FEATURE", "my_feature")
+           ]
+
+   class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
+       def configure_args(self):
+           return self.with_or_without("my-feature", variant="my_feature")
+
+In this example, ``spack install example +my_feature build_system=cmake`` will
+pick the ``CMakeBuilder`` and invoke ``cmake -DMY_FEATURE:BOOL=ON``.
+
+Similarly, ``spack install example +my_feature build_system=autotools`` will pick
+the ``AutotoolsBuilder`` and invoke ``./configure --with-my-feature``.
+
+Dependencies are always specified in the package class. When some dependencies
+depend on the choice of the build system, it is possible to use ``when`` conditions as
+usual:
+
+.. code-block:: python
+
+   class Example(CMakePackage, AutotoolsPackage):
+
+       build_system("cmake", "autotools", default="cmake")
+
+       # Runtime dependencies
+       depends_on("ncurses")
+       depends_on("libxml2")
+
+       # Lower bounds for cmake only apply when using cmake as the build system
+       with when("build_system=cmake"):
+           depends_on("cmake@3.18:", when="@2.0:", type="build")
+           depends_on("cmake@3:", type="build")
+
+       # Specify extra build dependencies used only in the configure script
+       with when("build_system=autotools"):
+           depends_on("perl", type="build")
+           depends_on("pkgconfig", type="build")
+
+Very often projects switch from one build system to another, or add support
+for a new build system from a certain version, which means that the choice
+of the build system typically depends on a version range. Those situations can
+be handled by using conditional values in the ``build_system`` directive:
+
+.. code-block:: python
+
+   class Example(CMakePackage, AutotoolsPackage):
+
+       build_system(
+           conditional("cmake", when="@0.64:"),
+           conditional("autotools", when="@:0.63"),
+           default="cmake",
+       )
+
+In this example the directive imposes a change from ``Autotools`` to ``CMake`` when going
+from ``v0.63`` to ``v0.64``.
+
+The ``build_system`` can be used as an ordinary variant, which also means that it can
+be used in ``depends_on`` statements. This can be useful when a package *requires* that
+its dependency has a CMake config file, meaning that the dependent can only build when the
+dependency is built with CMake, and not Autotools. In that case, you can force the choice
+of the build system in the dependent:
+
+.. code-block:: python
+
+   class Dependent(CMakePackage):
+
+       depends_on("example build_system=cmake")


.. _install-environment:

-----------------------
@@ -4313,7 +4428,7 @@ for supported features, for instance:

.. code-block:: python

-   if "avx512" in spec.target:
+   if spec.satisfies("target=avx512"):
       args.append("--with-avx512")

The snippet above will append the ``--with-avx512`` item to a list of arguments only if the corresponding
@@ -6748,3 +6863,63 @@ To achieve backward compatibility with the single-class format Spack creates in
|
||||
Overall the role of the adapter is to route access to attributes of methods first through the ``*Package``
|
||||
hierarchy, and then back to the base class builder. This is schematically shown in the diagram above, where
|
||||
the adapter role is to "emulate" a method resolution order like the one represented by the red arrows.
|
||||
|
||||
------------------------------
Specifying License Information
------------------------------

Most of the software in Spack is open source, and most open source software is released
under one or more `common open source licenses <https://opensource.org/licenses/>`_.
Specifying the license that a package is released under in a project's
`package.py` is good practice. To specify a license, find the `SPDX identifier
<https://spdx.org/licenses/>`_ for a project and then add it using the license
directive:

.. code-block:: python

    license("<SPDX Identifier HERE>")

For example, the SPDX ID for the Apache Software License, version 2.0 is ``Apache-2.0``,
so you'd write:

.. code-block:: python

    license("Apache-2.0")

Or, for a dual-licensed package like Spack, you would use an `SPDX Expression
<https://spdx.github.io/spdx-spec/v2-draft/SPDX-license-expressions/>`_ with both of its
licenses:

.. code-block:: python

    license("Apache-2.0 OR MIT")

Note that specifying a license without a when clause makes it apply to all
versions and variants of the package, which might not actually be the case.
For example, a project might have switched licenses at some point or have
certain build configurations that include files that are licensed differently.
Spack itself used to be under the ``LGPL-2.1`` license, until it was relicensed
in version ``0.12`` in 2018.

You can specify when a ``license()`` directive applies using a ``when=``
clause, just like other directives. For example, to specify that a specific
license identifier should only apply to versions up to ``0.11``, but another
license should apply for later versions, you could write:

.. code-block:: python

    license("LGPL-2.1", when="@:0.11")
    license("Apache-2.0 OR MIT", when="@0.12:")

Note that unlike for most other directives, the ``when=`` constraints in the
``license()`` directive can't intersect. Spack needs to be able to resolve
exactly one license identifier expression for any given version. To specify
*multiple* licenses, use SPDX expressions and operators as above. The operators
you probably care most about are:

* ``OR``: user chooses one license to adhere to; and
* ``AND``: user has to adhere to all the licenses.

You may also care about `license exceptions
<https://spdx.org/licenses/exceptions-index.html>`_ that use the ``WITH`` operator,
e.g. ``Apache-2.0 WITH LLVM-exception``.

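As a combined illustration (a hypothetical package, not taken from Spack's
repositories), the ``WITH`` operator and ``when=`` clauses compose naturally:

.. code-block:: python

    # Assumption: a package that bundles an LLVM-derived runtime starting with v2.0
    license("MIT", when="@:1")
    license("MIT AND Apache-2.0 WITH LLVM-exception", when="@2.0:")
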
@@ -213,6 +213,16 @@ pipeline jobs.

``spack ci generate``
^^^^^^^^^^^^^^^^^^^^^

Throughout this documentation, references to the "mirror" mean the target
mirror which is checked for the presence of up-to-date specs, and where
any scheduled jobs should push built binary packages. In the past, this
defaulted to the mirror at index 0 in the mirror configs, and could be
overridden using the ``--buildcache-destination`` argument. Starting with
Spack 0.23, ``spack ci generate`` will require you to identify this mirror
by the name "buildcache-destination". While you can configure any number
of mirrors as sources for your pipelines, you will need to identify the
destination mirror by name.

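For instance (a sketch, assuming an S3 bucket of your own), the destination
mirror could be registered under the expected name with:

.. code-block:: console

    $ spack mirror add buildcache-destination s3://my-bucket/buildcache
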
Concretizes the specs in the active environment, stages them (as described in
:ref:`staging_algorithm`), and writes the resulting ``.gitlab-ci.yml`` to disk.
During concretization of the environment, ``spack ci generate`` also writes a

@@ -4,7 +4,7 @@

SPDX-License-Identifier: (Apache-2.0 OR MIT)

=====================================
Using Spack to Replace Homebrew/Conda
Spack for Homebrew/Conda Users
=====================================

Spack is an incredibly powerful package manager, designed for supercomputers

@@ -191,18 +191,18 @@ The ``--fresh`` flag tells Spack to use the latest version of every package

where possible instead of trying to optimize for reuse of existing installed
packages.

The ``--force`` flag in addition tells Spack to overwrite its previous
concretization decisions, allowing you to choose a new version of Python.
If any of the new packages like Bash are already installed, ``spack install``
won't re-install them, it will keep the symlinks in place.

-----------------------------------
Updating & Cleaning Up Old Packages
-----------------------------------

If you're looking to mimic the behavior of Homebrew, you may also want to
clean up out-of-date packages from your environment after an upgrade. To
upgrade your entire software stack within an environment and clean up old
package versions, simply run the following commands:

.. code-block:: console

@@ -212,9 +212,9 @@ package versions, simply run the following commands:

    $ spack concretize --fresh --force
    $ spack install
    $ spack gc

Running ``spack mark -i --all`` tells Spack to mark all of the existing
packages within an environment as "implicitly" installed. This tells
spack's garbage collection system that these packages should be cleaned up.

Don't worry however, this will not remove your entire environment.

@@ -223,8 +223,8 @@ a fresh concretization and will re-mark any packages that should remain

installed as "explicitly" installed.

**Note:** if you use multiple spack environments you should re-run ``spack install``
in each of your environments prior to running ``spack gc`` to prevent spack
from uninstalling any shared packages that are no longer required by the
environment you just upgraded.

--------------

@@ -2,12 +2,12 @@ sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.5.0
sphinx-rtd-theme==1.3.0
python-levenshtein==0.21.1
python-levenshtein==0.23.0
docutils==0.18.1
pygments==2.16.1
urllib3==2.0.5
pytest==7.4.2
urllib3==2.0.7
pytest==7.4.3
isort==5.12.0
black==23.9.1
black==23.10.1
flake8==6.1.0
mypy==1.5.1
mypy==1.6.1

@@ -1,9 +1,7 @@
Name, Supported Versions, Notes, Requirement Reason
Python, 3.6--3.11, , Interpreter for Spack
Python, 3.6--3.12, , Interpreter for Spack
C/C++ Compilers, , , Building software
make, , , Build software
patch, , , Build software
bash, , , Compiler wrappers
tar, , , Extract/create archives
gzip, , , Compress/Decompress archives
unzip, , , Compress/Decompress archives

@@ -156,6 +156,37 @@ def lookup(name):

    shutil.copystat = copystat


def polite_path(components: Iterable[str]):
    """
    Given a list of strings which are intended to be path components,
    generate a path, and format each component to avoid generating extra
    path entries.

    For example all "/", "\", and ":" characters will be replaced with
    "_". Other characters like "=" will also be replaced.
    """
    return os.path.join(*[polite_filename(x) for x in components])


@memoized
def _polite_antipattern():
    # A regex of all the characters we don't want in a filename
    return re.compile(r"[^A-Za-z0-9_.-]")


def polite_filename(filename: str) -> str:
    """
    Replace generally problematic filename characters with underscores.

    This differs from sanitize_filename in that it is more aggressive in
    changing characters in the name. For example it removes "=" which can
    confuse path parsing in external tools.
    """
    # This character set applies for both Windows and Linux. It does not
    # account for reserved filenames in Windows.
    return _polite_antipattern().sub("_", filename)
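
# Illustrative usage (not part of the diff): any character outside
# [A-Za-z0-9_.-] is replaced by an underscore, and components are joined
# into a path, e.g. on POSIX:
#
#   polite_filename("gcc@12.2.0=debug")  -> "gcc_12.2.0_debug"
#   polite_path(["a:b", "c=d"])          -> "a_b/c_d"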


def getuid():
    if sys.platform == "win32":
        import ctypes

@@ -211,6 +211,7 @@ def info(message, *args, **kwargs):

            stream.write(line + "\n")
        else:
            stream.write(indent + _output_filter(str(arg)) + "\n")
    stream.flush()


def verbose(message, *args, **kwargs):

@@ -8,8 +8,8 @@

from llnl.util.lang import memoized

import spack.spec
import spack.version
from spack.compilers.clang import Clang
from spack.spec import CompilerSpec
from spack.util.executable import Executable, ProcessError


@@ -17,7 +17,9 @@ class ABI:

    """This class provides methods to test ABI compatibility between specs.
    The current implementation is rather rough and could be improved."""

    def architecture_compatible(self, target, constraint):
    def architecture_compatible(
        self, target: spack.spec.Spec, constraint: spack.spec.Spec
    ) -> bool:
        """Return true if architecture of target spec is ABI compatible
        to the architecture of constraint spec. If either the target
        or constraint specs have no architecture, target is also defined

@@ -34,7 +36,7 @@ def _gcc_get_libstdcxx_version(self, version):

        a compiler's libstdc++ or libgcc_s"""
        from spack.build_environment import dso_suffix

        spec = CompilerSpec("gcc", version)
        spec = spack.spec.CompilerSpec("gcc", version)
        compilers = spack.compilers.compilers_for_spec(spec)
        if not compilers:
            return None
@@ -77,16 +79,20 @@ def _gcc_compiler_compare(self, pversion, cversion):

            return False
        return plib == clib

    def _intel_compiler_compare(self, pversion, cversion):
    def _intel_compiler_compare(
        self, pversion: spack.version.ClosedOpenRange, cversion: spack.version.ClosedOpenRange
    ) -> bool:
        """Returns true iff the intel version pversion and cversion
        are ABI compatible"""

        # Test major and minor versions. Ignore build version.
        if len(pversion.version) < 2 or len(cversion.version) < 2:
            return False
        return pversion.version[:2] == cversion.version[:2]
        pv = pversion.lo
        cv = cversion.lo
        return pv.up_to(2) == cv.up_to(2)
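
        # Illustrative note (not part of the diff): ``up_to(2)`` truncates a
        # version to its first two components, e.g.
        # Version("2021.4.0").up_to(2) == Version("2021.4"), so the comparison
        # above matches on major.minor only.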

    def compiler_compatible(self, parent, child, **kwargs):
    def compiler_compatible(
        self, parent: spack.spec.Spec, child: spack.spec.Spec, loose: bool = False
    ) -> bool:
        """Return true if compilers for parent and child are ABI compatible."""
        if not parent.compiler or not child.compiler:
            return True

@@ -95,7 +101,7 @@ def compiler_compatible(self, parent, child, **kwargs):

            # Different compiler families are assumed ABI incompatible
            return False

        if kwargs.get("loose", False):
        if loose:
            return True

        # TODO: Can we move the specialized ABI matching stuff

@@ -116,9 +122,10 @@ def compiler_compatible(self, parent, child, **kwargs):

                return True
        return False

    def compatible(self, target, constraint, **kwargs):
    def compatible(
        self, target: spack.spec.Spec, constraint: spack.spec.Spec, loose: bool = False
    ) -> bool:
        """Returns true if target spec is ABI compatible to constraint spec"""
        loosematch = kwargs.get("loose", False)
        return self.architecture_compatible(target, constraint) and self.compiler_compatible(
            target, constraint, loose=loosematch
            target, constraint, loose=loose
        )

@@ -307,10 +307,17 @@ def _check_build_test_callbacks(pkgs, error_cls):


@package_directives
def _check_patch_urls(pkgs, error_cls):
    """Ensure that patches fetched from GitHub have stable sha256 hashes."""
    """Ensure that patches fetched from GitHub and GitLab have stable sha256
    hashes."""
    github_patch_url_re = (
        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
        ".+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
        r".+/.+/(?:commit|pull)/[a-fA-F0-9]+\.(?:patch|diff)"
    )
    # Only .diff URLs have stable/full hashes:
    # https://forum.gitlab.com/t/patches-with-full-index/29313
    gitlab_patch_url_re = (
        r"^https?://(?:.+)?gitlab(?:.+)/"
        r".+/.+/-/(?:commit|merge_requests)/[a-fA-F0-9]+\.(?:patch|diff)"
    )

    errors = []

@@ -321,19 +328,27 @@ def _check_patch_urls(pkgs, error_cls):

            if not isinstance(patch, spack.patch.UrlPatch):
                continue

            if not re.match(github_patch_url_re, patch.url):
                continue

            full_index_arg = "?full_index=1"
            if not patch.url.endswith(full_index_arg):
                errors.append(
                    error_cls(
                        "patch URL in package {0} must end with {1}".format(
                            pkg_cls.name, full_index_arg
                        ),
                        [patch.url],
            if re.match(github_patch_url_re, patch.url):
                full_index_arg = "?full_index=1"
                if not patch.url.endswith(full_index_arg):
                    errors.append(
                        error_cls(
                            "patch URL in package {0} must end with {1}".format(
                                pkg_cls.name, full_index_arg
                            ),
                            [patch.url],
                        )
                    )
            elif re.match(gitlab_patch_url_re, patch.url):
                if not patch.url.endswith(".diff"):
                    errors.append(
                        error_cls(
                            "patch URL in package {0} must end with .diff".format(
                                pkg_cls.name
                            ),
                            [patch.url],
                        )
                    )
                )

    return errors
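
# Illustrative only (hypothetical URLs): patch URLs this audit accepts:
#   https://github.com/org/repo/pull/123.patch?full_index=1     (GitHub: full-index hash)
#   https://gitlab.example.com/org/repo/-/commit/abc1234.diff   (GitLab: .diff only)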


@@ -5,11 +5,13 @@

import codecs
import collections
import errno
import hashlib
import io
import itertools
import json
import os
import pathlib
import re
import shutil
import sys

@@ -23,7 +25,7 @@

import warnings
from contextlib import closing, contextmanager
from gzip import GzipFile
from typing import List, NamedTuple, Optional, Union
from typing import Dict, List, NamedTuple, Optional, Set, Tuple
from urllib.error import HTTPError, URLError

import llnl.util.filesystem as fsys

@@ -31,6 +33,7 @@

import llnl.util.tty as tty
from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree

import spack.caches
import spack.cmd
import spack.config as config
import spack.database as spack_db

@@ -38,6 +41,9 @@

import spack.hooks
import spack.hooks.sbang
import spack.mirror
import spack.oci.image
import spack.oci.oci
import spack.oci.opener
import spack.platforms
import spack.relocate as relocate
import spack.repo

@@ -47,6 +53,7 @@

import spack.util.crypto
import spack.util.file_cache as file_cache
import spack.util.gpg
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.timer as timer
@@ -124,25 +131,25 @@ class BinaryCacheIndex:

    mean we should have paid the price to update the cache earlier?
    """

    def __init__(self, cache_root):
        self._index_cache_root = cache_root
    def __init__(self, cache_root: Optional[str] = None):
        self._index_cache_root: str = cache_root or binary_index_location()

        # the key associated with the serialized _local_index_cache
        self._index_contents_key = "contents.json"

        # a FileCache instance storing copies of remote binary cache indices
        self._index_file_cache = None
        self._index_file_cache: Optional[file_cache.FileCache] = None

        # stores a map of mirror URL to index hash and cache key (index path)
        self._local_index_cache = None
        self._local_index_cache: Optional[dict] = None

        # hashes of remote indices already ingested into the concrete spec
        # cache (_mirrors_for_spec)
        self._specs_already_associated = set()
        self._specs_already_associated: Set[str] = set()

        # mapping from mirror urls to the time.time() of the last index fetch and a bool indicating
        # whether the fetch succeeded or not.
        self._last_fetch_times = {}
        self._last_fetch_times: Dict[str, float] = {}

        # _mirrors_for_spec is a dictionary mapping DAG hashes to lists of
        # entries indicating mirrors where that concrete spec can be found.

@@ -152,7 +159,7 @@ def __init__(self, cache_root):

        # - the concrete spec itself, keyed by ``spec`` (including the
        #   full hash, since the dag hash may match but we want to
        #   use the updated source if available)
        self._mirrors_for_spec = {}
        self._mirrors_for_spec: Dict[str, dict] = {}

    def _init_local_index_cache(self):
        if not self._index_file_cache:

@@ -216,11 +223,11 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):

            with self._index_file_cache.read_transaction(cache_key):
                db._read_from_file(cache_path)
        except spack_db.InvalidDatabaseVersionError as e:
            msg = (
            tty.warn(
                f"you need a newer Spack version to read the buildcache index for the "
                f"following mirror: '{mirror_url}'. {e.database_version_message}"
            )
            raise BuildcacheIndexError(msg) from e
            return

        spec_list = db.query_local(installed=False, in_buildcache=True)

@@ -471,14 +478,18 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):

        FetchIndexError
    """
    # TODO: get rid of this request, handle 404 better
    if not web_util.url_exists(
    scheme = urllib.parse.urlparse(mirror_url).scheme

    if scheme != "oci" and not web_util.url_exists(
        url_util.join(mirror_url, _build_cache_relative_path, "index.json")
    ):
        return False

    etag = cache_entry.get("etag", None)
    if etag:
        fetcher = EtagIndexFetcher(mirror_url, etag)
    if scheme == "oci":
        # TODO: Actually etag and OCI are not mutually exclusive...
        fetcher = OCIIndexFetcher(mirror_url, cache_entry.get("index_hash", None))
    elif cache_entry.get("etag"):
        fetcher = EtagIndexFetcher(mirror_url, cache_entry["etag"])
    else:
        fetcher = DefaultIndexFetcher(
            mirror_url, local_hash=cache_entry.get("index_hash", None)

@@ -519,15 +530,8 @@ def binary_index_location():

    return spack.util.path.canonicalize_path(cache_root)


def _binary_index():
    """Get the singleton store instance."""
    return BinaryCacheIndex(binary_index_location())


#: Singleton binary_index instance
binary_index: Union[BinaryCacheIndex, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(
    _binary_index
)
#: Default binary cache index instance
BINARY_INDEX: BinaryCacheIndex = llnl.util.lang.Singleton(BinaryCacheIndex)  # type: ignore


class NoOverwriteException(spack.error.SpackError):
@@ -622,22 +626,14 @@ def build_cache_prefix(prefix):


def buildinfo_file_name(prefix):
    """
    Filename of the binary package meta-data file
    """
    name = os.path.join(prefix, ".spack/binary_distribution")
    return name
    """Filename of the binary package meta-data file"""
    return os.path.join(prefix, ".spack", "binary_distribution")


def read_buildinfo_file(prefix):
    """
    Read buildinfo file
    """
    filename = buildinfo_file_name(prefix)
    with open(filename, "r") as inputfile:
        content = inputfile.read()
    buildinfo = syaml.load(content)
    return buildinfo
    """Read buildinfo file"""
    with open(buildinfo_file_name(prefix), "r") as f:
        return syaml.load(f)


class BuildManifestVisitor(BaseDirectoryVisitor):

@@ -798,11 +794,7 @@ def tarball_directory_name(spec):

    Return name of the tarball directory according to the convention
        <os>-<architecture>/<compiler>/<package>-<version>/
    """
    return os.path.join(
        str(spec.architecture),
        f"{spec.compiler.name}-{spec.compiler.version}",
        f"{spec.name}-{spec.version}",
    )
    return spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}")

def tarball_name(spec, ext):

@@ -810,10 +802,10 @@ def tarball_name(spec, ext):

    Return the name of the tarfile according to the convention
        <os>-<architecture>-<package>-<dag_hash><ext>
    """
    return (
        f"{spec.architecture}-{spec.compiler.name}-{spec.compiler.version}-"
        f"{spec.name}-{spec.version}-{spec.dag_hash()}{ext}"
    spec_formatted = spec.format_path(
        "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}"
    )
    return f"{spec_formatted}{ext}"


def tarball_path_name(spec, ext):

@@ -824,18 +816,6 @@ def tarball_path_name(spec, ext):

    return os.path.join(tarball_directory_name(spec), tarball_name(spec, ext))


def checksum_tarball(file):
    # calculate sha256 hash of tar file
    block_size = 65536
    hasher = hashlib.sha256()
    with open(file, "rb") as tfile:
        buf = tfile.read(block_size)
        while len(buf) > 0:
            hasher.update(buf)
            buf = tfile.read(block_size)
    return hasher.hexdigest()


def select_signing_key(key=None):
    if key is None:
        keys = spack.util.gpg.signing_keys()
@@ -914,7 +894,7 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di

        index_json_path,
        url_util.join(cache_prefix, "index.json"),
        keep_original=False,
        extra_args={"ContentType": "application/json"},
        extra_args={"ContentType": "application/json", "CacheControl": "no-cache"},
    )

    # Push the hash

@@ -922,7 +902,7 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di

        index_hash_path,
        url_util.join(cache_prefix, "index.json.hash"),
        keep_original=False,
        extra_args={"ContentType": "text/plain"},
        extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"},
    )

@@ -1152,63 +1132,190 @@ def gzip_compressed_tarfile(path):

    # compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB
    # compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
    # So we follow gzip.
    with open(path, "wb") as fileobj, closing(
        GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=fileobj)
    ) as gzip_file, tarfile.TarFile(name="", mode="w", fileobj=gzip_file) as tar:
        yield tar
    with open(path, "wb") as f, ChecksumWriter(f) as inner_checksum, closing(
        GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=inner_checksum)
    ) as gzip_file, ChecksumWriter(gzip_file) as outer_checksum, tarfile.TarFile(
        name="", mode="w", fileobj=outer_checksum
    ) as tar:
        yield tar, inner_checksum, outer_checksum

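# Illustrative note (not part of the diff): ``inner_checksum`` wraps the raw
# file object, so it hashes the *compressed* .tar.gz bytes as they are written;
# ``outer_checksum`` wraps the gzip stream, so it hashes the *uncompressed* tar
# bytes before compression.
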
def deterministic_tarinfo(tarinfo: tarfile.TarInfo):
    # We only add files, symlinks, hardlinks, and directories.
    # No character devices, block devices and FIFOs should ever enter a tarball.
    if tarinfo.isdev():
        return None

    # For distribution, it makes no sense to keep user/group data, since (a) they don't exist
    # on other machines, and (b) they lead to surprises as `tar x` run as root will change
    # ownership if it can. We want to extract as the current user. By setting owner to root,
    # root will extract as root, and a non-privileged user will extract as themselves.
    tarinfo.uid = 0
    tarinfo.gid = 0
    tarinfo.uname = ""
    tarinfo.gname = ""

    # Reset mtime to epoch time; our prefixes are not truly immutable, so files may get
    # touched; as long as the content does not change, this ensures we get stable tarballs.
    tarinfo.mtime = 0

    # Normalize mode
    if tarinfo.isfile() or tarinfo.islnk():
        # If user can execute, use 0o755; else 0o644
        # This is to avoid potentially unsafe world writable & executable files that may get
        # extracted when Python or tar is run with privileges
        tarinfo.mode = 0o644 if tarinfo.mode & 0o100 == 0 else 0o755
    else:  # symbolic link and directories
        tarinfo.mode = 0o755

    return tarinfo
def _tarinfo_name(absolute_path: str, *, _path=pathlib.PurePath) -> str:
    """Compute tarfile entry name as the relative path from the (system) root."""
    return _path(*_path(absolute_path).parts[1:]).as_posix()
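
# Illustrative only (not part of the diff): the helper strips the leading root,
# e.g. _tarinfo_name("/opt/spack/opt/pkg") -> "opt/spack/opt/pkg".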


def tar_add_metadata(tar: tarfile.TarFile, path: str, data: dict):
    # Serialize buildinfo for the tarball
    bstring = syaml.dump(data, default_flow_style=True).encode("utf-8")
    tarinfo = tarfile.TarInfo(name=path)
    tarinfo.size = len(bstring)
    tar.addfile(deterministic_tarinfo(tarinfo), io.BytesIO(bstring))
def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
    """Create a tarfile of an install prefix of a spec. Skips existing buildinfo file.
    Only adds regular files, symlinks and dirs. Skips devices, fifos. Preserves hardlinks.
    Normalizes permissions like git. Tar entries are added in depth-first pre-order, with
    dir entries partitioned by file | dir, and sorted alphabetically, for reproducibility.
    Partitioning ensures only one dir is in memory at a time, and sorting improves compression.

    Args:
        tar: tarfile object to add files to
        prefix: absolute install prefix of spec"""
    if not os.path.isabs(prefix) or not os.path.isdir(prefix):
        raise ValueError(f"prefix '{prefix}' must be an absolute path to a directory")
    hardlink_to_tarinfo_name: Dict[Tuple[int, int], str] = dict()
    stat_key = lambda stat: (stat.st_dev, stat.st_ino)

    try:  # skip buildinfo file if it exists
        files_to_skip = [stat_key(os.lstat(buildinfo_file_name(prefix)))]
    except OSError:
        files_to_skip = []

    dir_stack = [prefix]
    while dir_stack:
        dir = dir_stack.pop()

        # Add the dir before its contents
        dir_info = tarfile.TarInfo(_tarinfo_name(dir))
        dir_info.type = tarfile.DIRTYPE
        dir_info.mode = 0o755
        tar.addfile(dir_info)

        # Sort by name: reproducible & improves compression
        with os.scandir(dir) as it:
            entries = sorted(it, key=lambda entry: entry.name)

        new_dirs = []
        for entry in entries:
            if entry.is_dir(follow_symlinks=False):
                new_dirs.append(entry.path)
                continue

            file_info = tarfile.TarInfo(_tarinfo_name(entry.path))

            s = entry.stat(follow_symlinks=False)

            # Skip existing binary distribution files.
            id = stat_key(s)
            if id in files_to_skip:
                continue

            # Normalize the mode
            file_info.mode = 0o644 if s.st_mode & 0o100 == 0 else 0o755

            if entry.is_symlink():
                file_info.type = tarfile.SYMTYPE
                file_info.linkname = os.readlink(entry.path)
                tar.addfile(file_info)

            elif entry.is_file(follow_symlinks=False):
                # Deduplicate hardlinks
                if s.st_nlink > 1:
                    if id in hardlink_to_tarinfo_name:
                        file_info.type = tarfile.LNKTYPE
                        file_info.linkname = hardlink_to_tarinfo_name[id]
                        tar.addfile(file_info)
                        continue
                    hardlink_to_tarinfo_name[id] = file_info.name

                # If file not yet seen, copy it.
                file_info.type = tarfile.REGTYPE
                file_info.size = s.st_size

                with open(entry.path, "rb") as f:
                    tar.addfile(file_info, f)

        dir_stack.extend(reversed(new_dirs))  # we pop, so reverse to stay alphabetical


def deterministic_tarinfo_without_buildinfo(tarinfo: tarfile.TarInfo):
    """Skip buildinfo file when creating a tarball, and normalize other tarinfo fields."""
    if tarinfo.name.endswith("/.spack/binary_distribution"):
        return None
class ChecksumWriter(io.BufferedIOBase):
    """Checksum writer computes a checksum while writing to a file."""

    return deterministic_tarinfo(tarinfo)
    myfileobj = None

    def __init__(self, fileobj, algorithm=hashlib.sha256):
        self.fileobj = fileobj
        self.hasher = algorithm()
        self.length = 0

    def hexdigest(self):
        return self.hasher.hexdigest()

    def write(self, data):
        if isinstance(data, (bytes, bytearray)):
            length = len(data)
        else:
            data = memoryview(data)
            length = data.nbytes

        if length > 0:
            self.fileobj.write(data)
            self.hasher.update(data)

        self.length += length

        return length

    def read(self, size=-1):
        raise OSError(errno.EBADF, "read() on write-only object")

    def read1(self, size=-1):
        raise OSError(errno.EBADF, "read1() on write-only object")

    def peek(self, n):
        raise OSError(errno.EBADF, "peek() on write-only object")

    @property
    def closed(self):
        return self.fileobj is None

    def close(self):
        fileobj = self.fileobj
        if fileobj is None:
            return
        self.fileobj.close()
        self.fileobj = None

    def flush(self):
        self.fileobj.flush()

    def fileno(self):
        return self.fileobj.fileno()

    def rewind(self):
        raise OSError("Can't rewind while computing checksum")

    def readable(self):
        return False

    def writable(self):
        return True

    def seekable(self):
        return True

    def tell(self):
        return self.fileobj.tell()

    def seek(self, offset, whence=io.SEEK_SET):
        # In principle forward seek is possible with b"0" padding,
        # but this is not implemented.
        if offset == 0 and whence == io.SEEK_CUR:
            return
        raise OSError("Can't seek while computing checksum")

    def readline(self, size=-1):
        raise OSError(errno.EBADF, "readline() on write-only object")
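
# Illustrative only (not part of the diff): ChecksumWriter hashes whatever is
# written through it, so a stream can be written and checksummed in one pass:
#
#   with open("blob.bin", "wb") as f, ChecksumWriter(f) as w:
#       w.write(b"some payload")
#       digest = w.hexdigest()  # sha256 of everything written so far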


def _do_create_tarball(tarfile_path: str, binaries_dir: str, pkg_dir: str, buildinfo: dict):
    with gzip_compressed_tarfile(tarfile_path) as tar:
        tar.add(name=binaries_dir, arcname=pkg_dir, filter=deterministic_tarinfo_without_buildinfo)
        tar_add_metadata(tar, buildinfo_file_name(pkg_dir), buildinfo)
def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict):
    with gzip_compressed_tarfile(tarfile_path) as (tar, inner_checksum, outer_checksum):
        # Tarball the install prefix
        tarfile_of_spec_prefix(tar, binaries_dir)

        # Serialize buildinfo for the tarball
        bstring = syaml.dump(buildinfo, default_flow_style=True).encode("utf-8")
        tarinfo = tarfile.TarInfo(name=_tarinfo_name(buildinfo_file_name(binaries_dir)))
        tarinfo.type = tarfile.REGTYPE
        tarinfo.size = len(bstring)
        tarinfo.mode = 0o644
        tar.addfile(tarinfo, io.BytesIO(bstring))

    return inner_checksum.hexdigest(), outer_checksum.hexdigest()


class PushOptions(NamedTuple):

@@ -1280,20 +1387,14 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option

    ):
        raise NoOverwriteException(url_util.format(remote_specfile_path))

    pkg_dir = os.path.basename(spec.prefix.rstrip(os.path.sep))

    binaries_dir = spec.prefix

    # create info for later relocation and create tar
    buildinfo = get_buildinfo_dict(spec)

    _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo)

    # get the sha256 checksum of the tarball
    checksum = checksum_tarball(tarfile_path)
    checksum, _ = _do_create_tarball(tarfile_path, binaries_dir, buildinfo)

    # add sha256 checksum to spec.json

    with open(spec_file, "r") as inputfile:
        content = inputfile.read()
    if spec_file.endswith(".json"):

@@ -1336,10 +1437,21 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option

    return None

class NotInstalledError(spack.error.SpackError):
    """Raised when a spec is not installed but picked to be packaged."""

    def __init__(self, specs: List[Spec]):
        super().__init__(
            "Cannot push non-installed packages",
            ", ".join(s.cformat("{name}{@version}{/hash:7}") for s in specs),
        )


def specs_to_be_packaged(
    specs: List[Spec], root: bool = True, dependencies: bool = True
) -> List[Spec]:
    """Return the list of nodes to be packaged, given a list of specs.
    Raises NotInstalledError if a spec is not installed but picked to be packaged.

    Args:
        specs: list of root specs to be processed

@@ -1347,19 +1459,35 @@ def specs_to_be_packaged(

        dependencies: include the dependencies of each
            spec in the nodes
    """

    if not root and not dependencies:
        return []
    elif dependencies:
        nodes = traverse.traverse_nodes(specs, root=root, deptype="all")
    else:
        nodes = set(specs)

    # Limit to installed non-externals.
    packageable = lambda n: not n.external and n.installed

    # Mass install check
    # Filter packageable roots
    with spack.store.STORE.db.read_transaction():
        return list(filter(packageable, nodes))
        if root:
            # Error on uninstalled roots, when roots are requested
            uninstalled_roots = list(s for s in specs if not s.installed)
            if uninstalled_roots:
                raise NotInstalledError(uninstalled_roots)
            roots = specs
        else:
            roots = []

        if dependencies:
            # Error on uninstalled deps, when deps are requested
            deps = list(
                traverse.traverse_nodes(
                    specs, deptype="all", order="breadth", root=False, key=traverse.by_dag_hash
                )
            )
            uninstalled_deps = list(s for s in deps if not s.installed)
            if uninstalled_deps:
                raise NotInstalledError(uninstalled_deps)
        else:
            deps = []

    return [s for s in itertools.chain(roots, deps) if not s.external]
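
# Illustrative only (not part of the diff): pushing a root and all of its
# dependencies would use
#
#   specs_to_be_packaged([spec], root=True, dependencies=True)
#
# which raises NotInstalledError if anything in that set is not installed.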


def push(spec: Spec, mirror_url: str, options: PushOptions):

@@ -1467,8 +1595,6 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):

    tarball = tarball_path_name(spec, ".spack")
    specfile_prefix = tarball_name(spec, ".spec")

    mirrors_to_try = []

    # Note on try_first and try_next:
    # mirrors_for_spec most likely came from spack caching remote
    # mirror indices locally and adding their specs to a local data

@@ -1481,63 +1607,116 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):

    try_first = [i["mirror_url"] for i in mirrors_for_spec] if mirrors_for_spec else []
    try_next = [i.fetch_url for i in configured_mirrors if i.fetch_url not in try_first]

    for url in try_first + try_next:
        mirrors_to_try.append(
            {
                "specfile": url_util.join(url, _build_cache_relative_path, specfile_prefix),
                "spackfile": url_util.join(url, _build_cache_relative_path, tarball),
            }
        )
    mirrors = try_first + try_next

    tried_to_verify_sigs = []

    # Assumes we care more about finding a spec file by preferred ext
    # than by mirror priority. This can be made less complicated as
    # we remove support for deprecated spec formats and buildcache layouts.
    for ext in ["json.sig", "json"]:
        for mirror_to_try in mirrors_to_try:
            specfile_url = "{0}.{1}".format(mirror_to_try["specfile"], ext)
            spackfile_url = mirror_to_try["spackfile"]
            local_specfile_stage = try_fetch(specfile_url)
            if local_specfile_stage:
                local_specfile_path = local_specfile_stage.save_filename
                signature_verified = False
    for try_signed in (True, False):
        for mirror in mirrors:
            # If it's an OCI index, do things differently, since we cannot compose URLs.
            parsed = urllib.parse.urlparse(mirror)

                if ext.endswith(".sig") and not unsigned:
                    # If we found a signed specfile at the root, try to verify
                    # the signature immediately. We will not download the
                    # tarball if we could not verify the signature.
                    tried_to_verify_sigs.append(specfile_url)
                    signature_verified = try_verify(local_specfile_path)
                    if not signature_verified:
                        tty.warn("Failed to verify: {0}".format(specfile_url))
            # TODO: refactor this to some "nice" place.
            if parsed.scheme == "oci":
                ref = spack.oci.image.ImageReference.from_string(mirror[len("oci://") :]).with_tag(
                    spack.oci.image.default_tag(spec)
                )

                if unsigned or signature_verified or not ext.endswith(".sig"):
                    # We will download the tarball in one of three cases:
                    # 1. user asked for --no-check-signature
                    # 2. user didn't ask for --no-check-signature, but we
                    #    found a spec.json.sig and verified the signature already
                    # 3. neither of the first two cases are true, but this file
                    #    is *not* a signed json (not a spec.json.sig file). That
                    #    means we already looked at all the mirrors and either didn't
                    #    find any .sig files or couldn't verify any of them. But it
                    #    is still possible to find an old style binary package where
                    #    the signature is a detached .asc file in the outer archive
                    #    of the tarball, and in that case, the only way to know is to
                    #    download the tarball. This is a deprecated use case, so if
                    #    something goes wrong during the extraction process (can't
                    #    verify signature, checksum doesn't match) we will fail at
                    #    that point instead of trying to download more tarballs from
                    #    the remaining mirrors, looking for one we can use.
                    tarball_stage = try_fetch(spackfile_url)
                    if tarball_stage:
                        return {
                            "tarball_stage": tarball_stage,
                            "specfile_stage": local_specfile_stage,
                            "signature_verified": signature_verified,
                        }
                # Fetch the manifest
                try:
                    response = spack.oci.opener.urlopen(
                        urllib.request.Request(
                            url=ref.manifest_url(),
                            headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
                        )
                    )
                except Exception:
                    continue

                local_specfile_stage.destroy()
                # Download the config = spec.json and the relevant tarball
                try:
                    manifest = json.loads(response.read())
                    spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"])
                    tarball_digest = spack.oci.image.Digest.from_string(
                        manifest["layers"][-1]["digest"]
                    )
                except Exception:
                    continue

                with spack.oci.oci.make_stage(
                    ref.blob_url(spec_digest), spec_digest, keep=True
                ) as local_specfile_stage:
                    try:
                        local_specfile_stage.fetch()
                        local_specfile_stage.check()
                    except Exception:
                        continue
                    local_specfile_stage.cache_local()

                with spack.oci.oci.make_stage(
                    ref.blob_url(tarball_digest), tarball_digest, keep=True
                ) as tarball_stage:
                    try:
                        tarball_stage.fetch()
                        tarball_stage.check()
                    except Exception:
                        continue
                    tarball_stage.cache_local()

                return {
                    "tarball_stage": tarball_stage,
                    "specfile_stage": local_specfile_stage,
                    "signature_verified": False,
                }

            else:
                ext = "json.sig" if try_signed else "json"
                specfile_path = url_util.join(mirror, _build_cache_relative_path, specfile_prefix)
                specfile_url = f"{specfile_path}.{ext}"
                spackfile_url = url_util.join(mirror, _build_cache_relative_path, tarball)
                local_specfile_stage = try_fetch(specfile_url)
                if local_specfile_stage:
                    local_specfile_path = local_specfile_stage.save_filename
                    signature_verified = False

                    if try_signed and not unsigned:
                        # If we found a signed specfile at the root, try to verify
                        # the signature immediately. We will not download the
                        # tarball if we could not verify the signature.
                        tried_to_verify_sigs.append(specfile_url)
                        signature_verified = try_verify(local_specfile_path)
                        if not signature_verified:
                            tty.warn("Failed to verify: {0}".format(specfile_url))

                    if unsigned or signature_verified or not try_signed:
                        # We will download the tarball in one of three cases:
                        # 1. user asked for --no-check-signature
                        # 2. user didn't ask for --no-check-signature, but we
                        #    found a spec.json.sig and verified the signature already
                        # 3. neither of the first two cases are true, but this file
                        #    is *not* a signed json (not a spec.json.sig file). That
                        #    means we already looked at all the mirrors and either didn't
                        #    find any .sig files or couldn't verify any of them. But it
                        #    is still possible to find an old style binary package where
                        #    the signature is a detached .asc file in the outer archive
                        #    of the tarball, and in that case, the only way to know is to
                        #    download the tarball. This is a deprecated use case, so if
                        #    something goes wrong during the extraction process (can't
                        #    verify signature, checksum doesn't match) we will fail at
                        #    that point instead of trying to download more tarballs from
                        #    the remaining mirrors, looking for one we can use.
                        tarball_stage = try_fetch(spackfile_url)
                        if tarball_stage:
                            return {
                                "tarball_stage": tarball_stage,
                                "specfile_stage": local_specfile_stage,
                                "signature_verified": signature_verified,
                            }

                    local_specfile_stage.destroy()

    # Falling through the nested loops means we exhaustively searched
    # for all known kinds of spec files on all mirrors and did not find
@@ -1770,7 +1949,7 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum

    )

    # compute the sha256 checksum of the tarball
    local_checksum = checksum_tarball(tarfile_path)
    local_checksum = spack.util.crypto.checksum(hashlib.sha256, tarfile_path)
    expected = remote_checksum["hash"]

    # if the checksums don't match don't install

@@ -1831,6 +2010,7 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti

    spec_dict = sjson.load(content)

    bchecksum = spec_dict["binary_cache_checksum"]

    filename = download_result["tarball_stage"].save_filename
    signature_verified = download_result["signature_verified"]
    tmpdir = None

@@ -1863,7 +2043,7 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti

    )

    # compute the sha256 checksum of the tarball
    local_checksum = checksum_tarball(tarfile_path)
    local_checksum = spack.util.crypto.checksum(hashlib.sha256, tarfile_path)
    expected = bchecksum["hash"]

    # if the checksums don't match don't install
@@ -2069,7 +2249,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):

        tty.debug("No Spack mirrors are currently configured")
        return {}

    results = binary_index.find_built_spec(spec, mirrors_to_check=mirrors_to_check)
    results = BINARY_INDEX.find_built_spec(spec, mirrors_to_check=mirrors_to_check)

    # The index may be out-of-date. If we aren't only considering indices, try
    # to fetch directly since we know where the file should be.

@@ -2078,7 +2258,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):

    # We found a spec by the direct fetch approach, we might as well
    # add it to our mapping.
    if results:
        binary_index.update_spec(spec, results)
        BINARY_INDEX.update_spec(spec, results)

    return results

@@ -2094,12 +2274,12 @@ def update_cache_and_get_specs():

    Throws:
        FetchCacheError
    """
    binary_index.update()
    return binary_index.get_all_built_specs()
    BINARY_INDEX.update()
    return BINARY_INDEX.get_all_built_specs()


def clear_spec_cache():
    binary_index.clear()
    BINARY_INDEX.clear()


def get_keys(install=False, trust=False, force=False, mirrors=None):
@@ -2422,7 +2602,7 @@ def get_remote_hash(self):

            return None
        return remote_hash.decode("utf-8")

    def conditional_fetch(self):
    def conditional_fetch(self) -> FetchIndexResult:
        # Do an intermediate fetch for the hash
        # and a conditional fetch for the contents

@@ -2436,12 +2616,12 @@ def conditional_fetch(self):

        try:
            response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
        except urllib.error.URLError as e:
            raise FetchIndexError("Could not fetch index from {}".format(url_index), e)
            raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e

        try:
            result = codecs.getreader("utf-8")(response).read()
        except ValueError as e:
            return FetchCacheError("Remote index {} is invalid".format(url_index), e)
            raise FetchIndexError("Remote index {} is invalid".format(url_index), e) from e

        computed_hash = compute_hash(result)

@@ -2473,7 +2653,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):

        self.etag = etag
        self.urlopen = urlopen

    def conditional_fetch(self):
    def conditional_fetch(self) -> FetchIndexResult:
        # Just do a conditional fetch immediately
        url = url_util.join(self.url, _build_cache_relative_path, "index.json")
        headers = {
@@ -2504,3 +2684,59 @@ def conditional_fetch(self):

            data=result,
            fresh=False,
        )


class OCIIndexFetcher:
    def __init__(self, url: str, local_hash, urlopen=None) -> None:
        self.local_hash = local_hash

        # Remove oci:// prefix
        assert url.startswith("oci://")
        self.ref = spack.oci.image.ImageReference.from_string(url[6:])
        self.urlopen = urlopen or spack.oci.opener.urlopen

    def conditional_fetch(self) -> FetchIndexResult:
        """Download an index from an OCI registry type mirror."""
        url_manifest = self.ref.with_tag(spack.oci.image.default_index_tag).manifest_url()
        try:
            response = self.urlopen(
                urllib.request.Request(
                    url=url_manifest,
                    headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
                )
            )
        except urllib.error.URLError as e:
            raise FetchIndexError(
                "Could not fetch manifest from {}".format(url_manifest), e
            ) from e

        try:
            manifest = json.loads(response.read())
        except Exception as e:
            raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e

        # Get first blob hash, which should be the index.json
        try:
            index_digest = spack.oci.image.Digest.from_string(manifest["layers"][0]["digest"])
        except Exception as e:
            raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e

        # Fresh?
        if index_digest.digest == self.local_hash:
            return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)

        # Otherwise fetch the blob / index.json
        response = self.urlopen(
            urllib.request.Request(
                url=self.ref.blob_url(index_digest),
                headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
            )
        )

        result = codecs.getreader("utf-8")(response).read()

        # Make sure the blob we download has the advertised hash
        if compute_hash(result) != index_digest.digest:
            raise FetchIndexError(f"Remote index {url_manifest} is invalid")

        return FetchIndexResult(etag=None, hash=index_digest.digest, data=result, fresh=False)
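
# Illustrative only (hypothetical registry): an OCI mirror index fetch looks like
#
#   fetcher = OCIIndexFetcher("oci://ghcr.io/example/buildcache", local_hash=None)
#   result = fetcher.conditional_fetch()  # FetchIndexResult(..., fresh=False)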

@@ -214,7 +214,7 @@ def _install_and_test(

        with spack.config.override(self.mirror_scope):
            # This index is currently needed to get the compiler used to build some
            # specs that we know by dag hash.
            spack.binary_distribution.binary_index.regenerate_spec_cache()
            spack.binary_distribution.BINARY_INDEX.regenerate_spec_cache()
            index = spack.binary_distribution.update_cache_and_get_specs()

            if not index:

@@ -228,7 +228,7 @@ def _install_and_test(

                if not abstract_spec.intersects(candidate_spec):
                    continue

                if python_spec is not None and python_spec not in abstract_spec:
                if python_spec is not None and not abstract_spec.intersects(f"^{python_spec}"):
                    continue

                for _, pkg_hash, pkg_sha256 in item["binaries"]:

@@ -291,6 +291,10 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:

        with spack_python_interpreter():
            # Add hint to use frontend operating system on Cray
            concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
            # This is needed to help the old concretizer take the `setuptools` dependency
            # only when bootstrapping from sources on Python 3.12
            if spec_for_current_python() == "python@3.12":
                concrete_spec.constrain("+force_setuptools")

            if module == "clingo":
                # TODO: remove when the old concretizer is deprecated  # pylint: disable=fixme
@@ -446,16 +450,11 @@ def ensure_executables_in_path_or_raise(

                current_bootstrapper.last_search["spec"],
                current_bootstrapper.last_search["command"],
            )
            env_mods = spack.util.environment.EnvironmentModifications()
            for dep in concrete_spec.traverse(
                root=True, order="post", deptype=("link", "run")
            ):
                env_mods.extend(
                    spack.user_environment.environment_modifications_for_spec(
                        dep, set_package_py_globals=False
                    )
            cmd.add_default_envmod(
                spack.user_environment.environment_modifications_for_specs(
                    concrete_spec, set_package_py_globals=False
                )
            cmd.add_default_envmod(env_mods)
            )
            return cmd

    assert exception_handler, (

@@ -40,12 +40,15 @@

import sys
import traceback
import types
from collections import defaultdict
from enum import Flag, auto
from itertools import chain
from typing import List, Tuple

import llnl.util.tty as tty
from llnl.string import plural
from llnl.util.filesystem import join_path
from llnl.util.lang import dedupe
from llnl.util.lang import dedupe, stable_partition
from llnl.util.symlink import symlink
from llnl.util.tty.color import cescape, colorize
from llnl.util.tty.log import MultiProcessFd

@@ -55,17 +58,21 @@

import spack.build_systems.python
import spack.builder
import spack.config
import spack.deptypes as dt
import spack.main
import spack.package_base
import spack.paths
import spack.platforms
import spack.repo
import spack.schema.environment
import spack.spec
import spack.store
import spack.subprocess_context
import spack.user_environment
import spack.util.path
import spack.util.pattern
from spack import traverse
from spack.context import Context
from spack.error import NoHeadersError, NoLibrariesError
from spack.install_test import spack_install_test_log
from spack.installer import InstallError

@@ -76,7 +83,6 @@

    env_flag,
    filter_system_paths,
    get_path,
    inspect_path,
    is_system_path,
    validate,
)

@@ -109,7 +115,6 @@

SPACK_CCACHE_BINARY = "SPACK_CCACHE_BINARY"
SPACK_SYSTEM_DIRS = "SPACK_SYSTEM_DIRS"


# Platform-specific library suffix.
if sys.platform == "darwin":
    dso_suffix = "dylib"
@@ -406,19 +411,13 @@ def set_compiler_environment_variables(pkg, env):


def set_wrapper_variables(pkg, env):
    """Set environment variables used by the Spack compiler wrapper
    (which have the prefix `SPACK_`) and also add the compiler wrappers
    to PATH.
    """Set environment variables used by the Spack compiler wrapper (which have the prefix
    `SPACK_`) and also add the compiler wrappers to PATH.

    This determines the injected -L/-I/-rpath options; each
    of these specifies a search order and this function computes these
    options in a manner that is intended to match the DAG traversal order
    in `modifications_from_dependencies`: that method uses a post-order
    traversal so that `PrependPath` actions from dependencies take lower
    precedence; we use a post-order traversal here to match the visitation
    order of `modifications_from_dependencies` (so we are visiting the
    lowest priority packages first).
    """
    This determines the injected -L/-I/-rpath options; each of these specifies a search order and
    this function computes these options in a manner that is intended to match the DAG traversal
    order in `SetupContext`. TODO: this is not the case yet, we're using post order, SetupContext
    is using topo order."""
    # Set environment variables if specified for
    # the given compiler
    compiler = pkg.compiler

@@ -537,45 +536,42 @@ def update_compiler_args_for_dep(dep):

    env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))


def set_module_variables_for_package(pkg):
|
||||
def set_package_py_globals(pkg, context: Context = Context.BUILD):
|
||||
"""Populate the Python module of a package with some useful global names.
|
||||
This makes things easier for package writers.
|
||||
"""
|
||||
# Put a marker on this module so that it won't execute the body of this
|
||||
# function again, since it is not needed
|
||||
marker = "_set_run_already_called"
|
||||
if getattr(pkg.module, marker, False):
|
||||
return
|
||||
|
||||
module = ModuleChangePropagator(pkg)
|
||||
|
||||
jobs = determine_number_of_jobs(parallel=pkg.parallel)
|
||||
|
||||
m = module
|
||||
m.make_jobs = jobs
|
||||
|
||||
# TODO: make these build deps that can be installed if not found.
|
||||
m.make = MakeExecutable("make", jobs)
|
||||
m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
|
||||
# TODO: johnwparent: add package or builder support to define these build tools
|
||||
# for now there is no entrypoint for builders to define these on their
|
||||
# own
|
||||
if sys.platform == "win32":
|
||||
m.nmake = Executable("nmake")
|
||||
m.msbuild = Executable("msbuild")
|
||||
# analog to configure for win32
|
||||
m.cscript = Executable("cscript")
|
||||
if context == Context.BUILD:
|
||||
jobs = determine_number_of_jobs(parallel=pkg.parallel)
|
||||
m.make_jobs = jobs
|
||||
|
||||
# Find the configure script in the archive path
|
||||
# Don't use which for this; we want to find it in the current dir.
|
||||
m.configure = Executable("./configure")
|
||||
# TODO: make these build deps that can be installed if not found.
|
||||
m.make = MakeExecutable("make", jobs)
|
||||
m.gmake = MakeExecutable("gmake", jobs)
|
||||
m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
|
||||
# TODO: johnwparent: add package or builder support to define these build tools
|
||||
# for now there is no entrypoint for builders to define these on their
|
||||
# own
|
||||
if sys.platform == "win32":
|
||||
m.nmake = Executable("nmake")
|
||||
m.msbuild = Executable("msbuild")
|
||||
# analog to configure for win32
|
||||
m.cscript = Executable("cscript")
|
||||
|
||||
# Standard CMake arguments
|
||||
m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
|
||||
m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
|
||||
m.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)
|
||||
# Find the configure script in the archive path
|
||||
# Don't use which for this; we want to find it in the current dir.
|
||||
m.configure = Executable("./configure")
|
||||
|
||||
# Put spack compiler paths in module scope.
|
||||
# Standard CMake arguments
|
||||
m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
|
||||
m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
|
||||
m.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)
|
||||
|
||||
# Put spack compiler paths in module scope. (Some packages use it
|
||||
# in setup_run_environment etc, so don't put it context == build)
|
||||
link_dir = spack.paths.build_env_path
|
||||
m.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"])
|
||||
m.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"])
|
||||
@@ -599,9 +595,6 @@ def static_to_shared_library(static_lib, shared_lib=None, **kwargs):
|
||||
|
||||
m.static_to_shared_library = static_to_shared_library
|
||||
|
||||
# Put a marker on this module so that it won't execute the body of this
|
||||
# function again, since it is not needed
|
||||
setattr(m, marker, True)
|
||||
module.propagate_changes_to_mro()
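
The globals injected here are what let `package.py` files call build tools without importing them. A minimal sketch of a hypothetical package relying on them (the package name is invented; `configure` and `make` are the injected globals, not imports):

```python
# Hypothetical package.py, for illustration only.
from spack.package import *


class Hypothetical(Package):
    """Toy package showing the globals injected by set_package_py_globals."""

    def install(self, spec, prefix):
        # `configure` and `make` are module-level globals injected at build
        # time; `make` already parallelizes according to make_jobs.
        configure(f"--prefix={prefix}")
        make()
        make("install")
```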


@@ -727,12 +720,15 @@ def load_external_modules(pkg):
            load_module(external_module)


def setup_package(pkg, dirty, context="build"):
def setup_package(pkg, dirty, context: Context = Context.BUILD):
    """Execute all environment setup routines."""
    if context not in ["build", "test"]:
        raise ValueError("'context' must be one of ['build', 'test'] - got: {0}".format(context))
    if context not in (Context.BUILD, Context.TEST):
        raise ValueError(f"'context' must be Context.BUILD or Context.TEST - got {context}")
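
`Context` itself is defined outside this diff (it is imported elsewhere below as `from spack.context import Context`). A minimal sketch of what such an enum could look like, written here as an assumption rather than the actual definition:

```python
# Sketch only; the real class lives elsewhere in Spack.
from enum import Enum, auto


class Context(Enum):
    BUILD = auto()
    RUN = auto()
    TEST = auto()

    def __str__(self) -> str:
        return self.name.lower()

    @classmethod
    def from_string(cls, s: str) -> "Context":
        # Map the legacy string values ("build", "run", "test") onto the enum.
        try:
            return cls[s.upper()]
        except KeyError:
            raise ValueError(f"context must be one of 'build', 'run', 'test', got: {s}")
```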

    set_module_variables_for_package(pkg)
    # First populate the package.py's module with the relevant globals that could be used in any
    # of the setup_* functions.
    setup_context = SetupContext(pkg.spec, context=context)
    setup_context.set_all_package_py_globals()

    # Keep track of env changes from packages separately, since we want to
    # issue warnings when packages make "suspicious" modifications.
@@ -740,13 +736,15 @@ def setup_package(pkg, dirty, context="build"):
    env_mods = EnvironmentModifications()

    # setup compilers for build contexts
    need_compiler = context == "build" or (context == "test" and pkg.test_requires_compiler)
    need_compiler = context == Context.BUILD or (
        context == Context.TEST and pkg.test_requires_compiler
    )
    if need_compiler:
        set_compiler_environment_variables(pkg, env_mods)
        set_wrapper_variables(pkg, env_mods)

    tty.debug("setup_package: grabbing modifications from dependencies")
    env_mods.extend(modifications_from_dependencies(pkg.spec, context, custom_mods_only=False))
    env_mods.extend(setup_context.get_env_modifications())
    tty.debug("setup_package: collected all modifications from dependencies")

    # architecture specific setup
@@ -754,28 +752,13 @@ def setup_package(pkg, dirty, context="build"):
    target = platform.target(pkg.spec.architecture.target)
    platform.setup_platform_environment(pkg, env_mods)

    if context == "build":
        tty.debug("setup_package: setup build environment for root")
        builder = spack.builder.create(pkg)
        builder.setup_build_environment(env_mods)

        if (not dirty) and (not env_mods.is_unset("CPATH")):
            tty.debug(
                "A dependency has updated CPATH, this may lead pkg-"
                "config to assume that the package is part of the system"
                " includes and omit it when invoked with '--cflags'."
            )
    elif context == "test":
        tty.debug("setup_package: setup test environment for root")
        env_mods.extend(
            inspect_path(
                pkg.spec.prefix,
                spack.user_environment.prefix_inspections(pkg.spec.platform),
                exclude=is_system_path,
            )
        )
        pkg.setup_run_environment(env_mods)
    if context == Context.TEST:
        env_mods.prepend_path("PATH", ".")
    elif context == Context.BUILD and not dirty and not env_mods.is_unset("CPATH"):
        tty.debug(
            "A dependency has updated CPATH, this may lead pkg-config to assume that the package "
            "is part of the system includes and omit it when invoked with '--cflags'."
        )

    # First apply the clean environment changes
    env_base.apply_modifications()

@@ -813,158 +796,256 @@ def setup_package(pkg, dirty, context="build"):
    return env_base


def _make_runnable(pkg, env):
    # Helper method which prepends a Package's bin/ prefix to the PATH
    # environment variable
    prefix = pkg.prefix
class EnvironmentVisitor:
    def __init__(self, *roots: spack.spec.Spec, context: Context):
        # For the roots (well, marked specs) we follow different edges
        # than for their deps, depending on the context.
        self.root_hashes = set(s.dag_hash() for s in roots)

    for dirname in ["bin", "bin64"]:
        bin_dir = os.path.join(prefix, dirname)
        if os.path.isdir(bin_dir):
            env.prepend_path("PATH", bin_dir)
        if context == Context.BUILD:
            # Drop direct run deps in build context
            # We don't really distinguish between install and build time test deps,
            # so we include them here as build-time test deps.
            self.root_depflag = dt.BUILD | dt.TEST | dt.LINK
        elif context == Context.TEST:
            # This is more of an extended run environment
            self.root_depflag = dt.TEST | dt.RUN | dt.LINK
        elif context == Context.RUN:
            self.root_depflag = dt.RUN | dt.LINK

    def neighbors(self, item):
        spec = item.edge.spec
        if spec.dag_hash() in self.root_hashes:
            depflag = self.root_depflag
        else:
            depflag = dt.LINK | dt.RUN
        return traverse.sort_edges(spec.edges_to_dependencies(depflag=depflag))


def modifications_from_dependencies(
    spec, context, custom_mods_only=True, set_package_py_globals=True
):
    """Returns the environment modifications that are required by
    the dependencies of a spec and also applies modifications
    to this spec's package at module scope, if need be.
class UseMode(Flag):
    #: Entrypoint spec (a spec to be built; an env root, etc)
    ROOT = auto()

    Environment modifications include:
    #: A spec used at runtime, but no executables in PATH
    RUNTIME = auto()

    - Updating PATH so that executables can be found
    - Updating CMAKE_PREFIX_PATH and PKG_CONFIG_PATH so that their respective
      tools can find Spack-built dependencies
    - Running custom package environment modifications
    #: A spec used at runtime, with executables in PATH
    RUNTIME_EXECUTABLE = auto()

    Custom package modifications can conflict with the default PATH changes
    we make (specifically for the PATH, CMAKE_PREFIX_PATH, and PKG_CONFIG_PATH
    environment variables), so this applies changes in a fixed order:
    #: A spec that's a direct build or test dep
    BUILDTIME_DIRECT = auto()

    - All modifications (custom and default) from external deps first
    - All modifications from non-external deps afterwards
    #: A spec that should be visible in search paths in a build env.
    BUILDTIME = auto()

    With that order, `PrependPath` actions from non-external default
    environment modifications will take precedence over custom modifications
    from external packages.
    #: Flag is set when the (node, mode) is finalized
    ADDED = auto()

    A secondary constraint is that custom and default modifications are
    grouped on a per-package basis: combined with the post-order traversal this
    means that default modifications of dependents can override custom
    modifications of dependencies (again, this would only occur for PATH,
    CMAKE_PREFIX_PATH, or PKG_CONFIG_PATH).

    Args:
        spec (spack.spec.Spec): spec for which we want the modifications
        context (str): either 'build' for build-time modifications or 'run'
            for run-time modifications
        custom_mods_only (bool): if True returns only custom modifications, if False
            returns custom and default modifications
        set_package_py_globals (bool): whether or not to set the global variables in the
            package.py files (this may be problematic when using buildcaches that have
            been built on a different but compatible OS)
    """
    if context not in ["build", "run", "test"]:
        raise ValueError(
            "Expecting context to be one of ['build', 'run', 'test'], " "got: {0}".format(context)
def effective_deptypes(
    *specs: spack.spec.Spec, context: Context = Context.BUILD
) -> List[Tuple[spack.spec.Spec, UseMode]]:
    """Given a list of input specs and a context, return a list of tuples of
    all specs that contribute to (environment) modifications, together with
    a flag specifying in what way they do so. The list is ordered topologically
    from root to leaf, meaning that environment modifications should be applied
    in reverse so that dependents override dependencies, not the other way around."""
    visitor = traverse.TopoVisitor(
        EnvironmentVisitor(*specs, context=context),
        key=lambda x: x.dag_hash(),
        root=True,
        all_edges=True,
    )
    traverse.traverse_depth_first_with_visitor(traverse.with_artificial_edges(specs), visitor)

    # Dictionary with "no mode" as default value, so it's easy to write modes[x] |= flag.
    use_modes = defaultdict(lambda: UseMode(0))
    nodes_with_type = []

    for edge in visitor.edges:
        parent, child, depflag = edge.parent, edge.spec, edge.depflag

        # Mark the starting point
        if parent is None:
            use_modes[child] = UseMode.ROOT
            continue

        parent_mode = use_modes[parent]

        # Nothing to propagate.
        if not parent_mode:
            continue

        # Depending on the context, include particular deps from the root.
        if UseMode.ROOT & parent_mode:
            if context == Context.BUILD:
                if (dt.BUILD | dt.TEST) & depflag:
                    use_modes[child] |= UseMode.BUILDTIME_DIRECT
                if dt.LINK & depflag:
                    use_modes[child] |= UseMode.BUILDTIME

            elif context == Context.TEST:
                if (dt.RUN | dt.TEST) & depflag:
                    use_modes[child] |= UseMode.RUNTIME_EXECUTABLE
                elif dt.LINK & depflag:
                    use_modes[child] |= UseMode.RUNTIME

            elif context == Context.RUN:
                if dt.RUN & depflag:
                    use_modes[child] |= UseMode.RUNTIME_EXECUTABLE
                elif dt.LINK & depflag:
                    use_modes[child] |= UseMode.RUNTIME

        # Propagate RUNTIME and RUNTIME_EXECUTABLE through link and run deps.
        if (UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE | UseMode.BUILDTIME_DIRECT) & parent_mode:
            if dt.LINK & depflag:
                use_modes[child] |= UseMode.RUNTIME
            if dt.RUN & depflag:
                use_modes[child] |= UseMode.RUNTIME_EXECUTABLE

        # Propagate BUILDTIME through link deps.
        if UseMode.BUILDTIME & parent_mode:
            if dt.LINK & depflag:
                use_modes[child] |= UseMode.BUILDTIME

        # Finalize the spec; the invariant is that all in-edges are processed
        # before out-edges, meaning that parent is done.
        if not (UseMode.ADDED & parent_mode):
            use_modes[parent] |= UseMode.ADDED
            nodes_with_type.append((parent, parent_mode))

    # Attach the leaf nodes, since we only added nodes with out-edges.
    for spec, parent_mode in use_modes.items():
        if parent_mode and not (UseMode.ADDED & parent_mode):
            nodes_with_type.append((spec, parent_mode))

    return nodes_with_type
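
A usage sketch (variable names are illustrative; `spec` is assumed to be an already-concretized spec). Because the returned list is topological from root to leaf, callers that apply environment modifications iterate it in reverse so dependents override dependencies:

```python
# Sketch: classify every spec that contributes to a build environment.
nodes_with_mode = effective_deptypes(spec, context=Context.BUILD)

for node, mode in reversed(nodes_with_mode):  # leaf to root
    if UseMode.BUILDTIME_DIRECT & mode:
        print(f"{node.name}: direct build/test dep, binaries go on PATH")
    elif UseMode.BUILDTIME & mode:
        print(f"{node.name}: visible in build search paths only")
    if UseMode.RUNTIME_EXECUTABLE & mode:
        print(f"{node.name}: needed at runtime, with executables")
```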


class SetupContext:
    """This class encapsulates the logic to determine environment modifications, and is used as
    well to set globals in modules of package.py."""

    def __init__(self, *specs: spack.spec.Spec, context: Context) -> None:
        """Construct a SetupContext object.
        Args:
            specs: single root spec for build/test context, possibly more for run context
            context: build, run, or test"""
        if (context == Context.BUILD or context == Context.TEST) and not len(specs) == 1:
            raise ValueError("Cannot setup build environment for multiple specs")
        specs_with_type = effective_deptypes(*specs, context=context)

        self.specs = specs
        self.context = context
        self.external: List[Tuple[spack.spec.Spec, UseMode]]
        self.nonexternal: List[Tuple[spack.spec.Spec, UseMode]]
        # Reverse so we go from leaf to root
        self.nodes_in_subdag = set(id(s) for s, _ in specs_with_type)

        # Split into non-external and external, maintaining topo order per group.
        self.external, self.nonexternal = stable_partition(
            reversed(specs_with_type), lambda t: t[0].external
        )
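
`stable_partition` here comes from `llnl.util.lang`; it splits an iterable into the items that satisfy a predicate and those that do not, keeping the relative order within each group. A self-contained sketch of those semantics:

```python
from typing import Callable, Iterable, List, Tuple, TypeVar

T = TypeVar("T")


def stable_partition_sketch(
    items: Iterable[T], predicate: Callable[[T], bool]
) -> Tuple[List[T], List[T]]:
    """Return (matching, non_matching), each in original iteration order."""
    matching: List[T] = []
    non_matching: List[T] = []
    for item in items:
        (matching if predicate(item) else non_matching).append(item)
    return matching, non_matching


# Here: externals land in one group, non-externals in the other, and the
# reversed topological (leaf-to-root) order is preserved within each group.
```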
        self.should_be_runnable = UseMode.BUILDTIME_DIRECT | UseMode.RUNTIME_EXECUTABLE
        self.should_setup_run_env = (
            UseMode.BUILDTIME_DIRECT | UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE
        )
        self.should_setup_dependent_build_env = UseMode.BUILDTIME | UseMode.BUILDTIME_DIRECT
        self.should_setup_build_env = UseMode.ROOT if context == Context.BUILD else UseMode(0)

    env = EnvironmentModifications()
        if context == Context.RUN or context == Context.TEST:
            self.should_be_runnable |= UseMode.ROOT
            self.should_setup_run_env |= UseMode.ROOT

    # Note: see computation of 'custom_mod_deps' and 'exe_deps' later in this
    # function; these sets form the building blocks of those collections.
    build_deps = set(spec.dependencies(deptype=("build", "test")))
    link_deps = set(spec.traverse(root=False, deptype="link"))
    build_link_deps = build_deps | link_deps
    build_and_supporting_deps = set()
    for build_dep in build_deps:
        build_and_supporting_deps.update(build_dep.traverse(deptype="run"))
    run_and_supporting_deps = set(spec.traverse(root=False, deptype=("run", "link")))
    test_and_supporting_deps = set()
    for test_dep in set(spec.dependencies(deptype="test")):
        test_and_supporting_deps.update(test_dep.traverse(deptype="run"))
        # Everything that calls setup_run_environment and setup_dependent_* needs globals set.
        self.should_set_package_py_globals = (
            self.should_setup_dependent_build_env | self.should_setup_run_env | UseMode.ROOT
        )
        # In a build context, the root and direct build deps need build-specific globals set.
        self.needs_build_context = UseMode.ROOT | UseMode.BUILDTIME_DIRECT

    # All dependencies that might have environment modifications to apply
    custom_mod_deps = set()
    if context == "build":
        custom_mod_deps.update(build_and_supporting_deps)
        # Tests may be performed after build
        custom_mod_deps.update(test_and_supporting_deps)
    else:
        # test/run context
        custom_mod_deps.update(run_and_supporting_deps)
        if context == "test":
            custom_mod_deps.update(test_and_supporting_deps)
    custom_mod_deps.update(link_deps)
    def set_all_package_py_globals(self):
        """Set the globals in modules of package.py files."""
        for dspec, flag in chain(self.external, self.nonexternal):
            pkg = dspec.package

    # Determine 'exe_deps': the set of packages with binaries we want to use
    if context == "build":
        exe_deps = build_and_supporting_deps | test_and_supporting_deps
    elif context == "run":
        exe_deps = set(spec.traverse(deptype="run"))
    elif context == "test":
        exe_deps = test_and_supporting_deps
            if self.should_set_package_py_globals & flag:
                if self.context == Context.BUILD and self.needs_build_context & flag:
                    set_package_py_globals(pkg, context=Context.BUILD)
                else:
                    # This includes runtime dependencies, also runtime deps of direct build deps.
                    set_package_py_globals(pkg, context=Context.RUN)

    def default_modifications_for_dep(dep):
        if dep in build_link_deps and not is_system_path(dep.prefix) and context == "build":
            prefix = dep.prefix
            for spec in dspec.dependents():
                # Note: some specs have dependents that are unreachable from the root, so avoid
                # setting globals for those.
                if id(spec) not in self.nodes_in_subdag:
                    continue
                dependent_module = ModuleChangePropagator(spec.package)
                pkg.setup_dependent_package(dependent_module, spec)
                dependent_module.propagate_changes_to_mro()

            env.prepend_path("CMAKE_PREFIX_PATH", prefix)
    def get_env_modifications(self) -> EnvironmentModifications:
        """Returns the environment variable modifications for the given input specs and context.
        Environment modifications include:
        - Updating PATH for packages that are required at runtime
        - Updating CMAKE_PREFIX_PATH and PKG_CONFIG_PATH so that their respective
          tools can find Spack-built dependencies (when context=build)
        - Running custom package environment modifications: setup_run_environment,
          setup_dependent_run_environment, setup_build_environment,
          setup_dependent_build_environment.

            for directory in ("lib", "lib64", "share"):
                pcdir = os.path.join(prefix, directory, "pkgconfig")
                if os.path.isdir(pcdir):
                    env.prepend_path("PKG_CONFIG_PATH", pcdir)
        The (partial) order imposed on the specs is externals first, then topological
        from leaf to root. That way externals cannot contribute search paths that would shadow
        Spack's prefixes, and dependents override variables set by dependencies."""
        env = EnvironmentModifications()
        for dspec, flag in chain(self.external, self.nonexternal):
            tty.debug(f"Adding env modifications for {dspec.name}")
            pkg = dspec.package

        if dep in exe_deps and not is_system_path(dep.prefix):
            _make_runnable(dep, env)
            if self.should_setup_dependent_build_env & flag:
                self._make_buildtime_detectable(dspec, env)

    def add_modifications_for_dep(dep):
        tty.debug("Adding env modifications for {0}".format(dep.name))
        # Some callers of this function only want the custom modifications.
        # For callers that want both custom and default modifications, we want
        # to perform the default modifications here (this groups custom
        # and default modifications together on a per-package basis).
        if not custom_mods_only:
            default_modifications_for_dep(dep)
                for root in self.specs:  # there is only one root in build context
                    spack.builder.create(pkg).setup_dependent_build_environment(env, root)

        # Perform custom modifications here (PrependPath actions performed in
        # the custom method override the default environment modifications
        # we do to help the build, namely for PATH, CMAKE_PREFIX_PATH, and
        # PKG_CONFIG_PATH)
        if dep in custom_mod_deps:
            dpkg = dep.package
            if set_package_py_globals:
                set_module_variables_for_package(dpkg)
            if self.should_setup_build_env & flag:
                spack.builder.create(pkg).setup_build_environment(env)

            current_module = ModuleChangePropagator(spec.package)
            dpkg.setup_dependent_package(current_module, spec)
            current_module.propagate_changes_to_mro()
            if self.should_be_runnable & flag:
                self._make_runnable(dspec, env)

            if context == "build":
                builder = spack.builder.create(dpkg)
                builder.setup_dependent_build_environment(env, spec)
            else:
                dpkg.setup_dependent_run_environment(env, spec)
        tty.debug("Added env modifications for {0}".format(dep.name))
            if self.should_setup_run_env & flag:
                run_env_mods = EnvironmentModifications()
                for spec in dspec.dependents(deptype=dt.LINK | dt.RUN):
                    if id(spec) in self.nodes_in_subdag:
                        pkg.setup_dependent_run_environment(run_env_mods, spec)
                pkg.setup_run_environment(run_env_mods)
                run_env_dict = run_env_mods.group_by_name()
                if self.context == Context.BUILD:
                    run_env_mods.drop("CC", "CXX", "F77", "FC")
                env.extend(run_env_mods)

    # Note that we want to perform environment modifications in a fixed order.
    # The Spec.traverse method provides this: i.e. in addition to
    # the post-order semantics, it also guarantees a fixed traversal order
    # among dependencies which are not constrained by post-order semantics.
    for dspec in spec.traverse(root=False, order="post"):
        if dspec.external:
            add_modifications_for_dep(dspec)
        return env

    for dspec in spec.traverse(root=False, order="post"):
        # Default env modifications for non-external packages can override
        # custom modifications of external packages (this can only occur
        # for modifications to PATH, CMAKE_PREFIX_PATH, and PKG_CONFIG_PATH)
        if not dspec.external:
            add_modifications_for_dep(dspec)
    def _make_buildtime_detectable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
        if is_system_path(dep.prefix):
            return

    return env
        env.prepend_path("CMAKE_PREFIX_PATH", dep.prefix)
        for d in ("lib", "lib64", "share"):
            pcdir = os.path.join(dep.prefix, d, "pkgconfig")
            if os.path.isdir(pcdir):
                env.prepend_path("PKG_CONFIG_PATH", pcdir)

    def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
        if is_system_path(dep.prefix):
            return

        for d in ("bin", "bin64"):
            bin_dir = os.path.join(dep.prefix, d)
            if os.path.isdir(bin_dir):
                env.prepend_path("PATH", bin_dir)


def get_cmake_prefix_path(pkg):
@@ -996,7 +1077,7 @@ def get_cmake_prefix_path(pkg):
def _setup_pkg_and_run(
    serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
):
    context = kwargs.get("context", "build")
    context: str = kwargs.get("context", "build")

    try:
        # We are in the child process. Python sets sys.stdin to
@@ -1012,7 +1093,7 @@ def _setup_pkg_and_run(
        if not kwargs.get("fake", False):
            kwargs["unmodified_env"] = os.environ.copy()
            kwargs["env_modifications"] = setup_package(
                pkg, dirty=kwargs.get("dirty", False), context=context
                pkg, dirty=kwargs.get("dirty", False), context=Context.from_string(context)
            )
        return_value = function(pkg, kwargs)
        write_pipe.send(return_value)

@@ -46,6 +46,7 @@ class AutotoolsPackage(spack.package_base.PackageBase):
    depends_on("gnuconfig", type="build", when="target=ppc64le:")
    depends_on("gnuconfig", type="build", when="target=aarch64:")
    depends_on("gnuconfig", type="build", when="target=riscv64:")
    depends_on("gmake", type="build")
    conflicts("platform=windows")

    def flags_to_build_system_args(self, flags):

@@ -142,10 +142,10 @@ def flags_to_build_system_args(self, flags):
        # We specify for each of them.
        if flags["ldflags"]:
            ldflags = " ".join(flags["ldflags"])
            ld_string = "-DCMAKE_{0}_LINKER_FLAGS={1}"
            # cmake has separate linker arguments for types of builds.
            for type in ["EXE", "MODULE", "SHARED", "STATIC"]:
                self.cmake_flag_args.append(ld_string.format(type, ldflags))
            self.cmake_flag_args.append(f"-DCMAKE_EXE_LINKER_FLAGS={ldflags}")
            self.cmake_flag_args.append(f"-DCMAKE_MODULE_LINKER_FLAGS={ldflags}")
            self.cmake_flag_args.append(f"-DCMAKE_SHARED_LINKER_FLAGS={ldflags}")

        # CMake has libs options separated by language. Apply ours to each.
        if flags["ldlibs"]:

@@ -9,7 +9,8 @@

import spack.builder
import spack.package_base
from spack.directives import build_system, conflicts
from spack.directives import build_system, conflicts, depends_on
from spack.multimethod import when

from ._checks import (
    BaseBuilder,
@@ -29,7 +30,10 @@ class MakefilePackage(spack.package_base.PackageBase):
    legacy_buildsystem = "makefile"

    build_system("makefile")
    conflicts("platform=windows", when="build_system=makefile")

    with when("build_system=makefile"):
        conflicts("platform=windows")
        depends_on("gmake", type="build")


@spack.builder.builder("makefile")

@@ -10,7 +10,7 @@

import spack.builder
import spack.package_base
from spack.directives import build_system, depends_on, variant
from spack.directives import build_system, conflicts, depends_on, variant
from spack.multimethod import when

from ._checks import BaseBuilder, execute_build_time_tests
@@ -47,6 +47,13 @@ class MesonPackage(spack.package_base.PackageBase):
    variant("strip", default=False, description="Strip targets on install")
    depends_on("meson", type="build")
    depends_on("ninja", type="build")
    # Python detection in meson requires distutils to be importable, but distutils no longer
    # exists in Python 3.12. In Spack, we can't use setuptools as distutils replacement,
    # because the distutils-precedence.pth startup file that setuptools ships with is not run
    # when setuptools is in PYTHONPATH; it has to be in system site-packages. In a future meson
    # release, the distutils requirement will be dropped, so this conflict can be relaxed.
    # We have patches to make it work with meson 1.1 and above.
    conflicts("^python@3.12:", when="^meson@:1.0")

    def flags_to_build_system_args(self, flags):
        """Produces a list of all command line arguments to pass the specified

@@ -23,13 +23,29 @@
import spack.spec
import spack.store
from spack.directives import build_system, depends_on, extends, maintainers
from spack.error import NoHeadersError, NoLibrariesError, SpecError
from spack.error import NoHeadersError, NoLibrariesError
from spack.install_test import test_part
from spack.version import Version

from ._checks import BaseBuilder, execute_install_time_tests


def _flatten_dict(dictionary):
    """Iterable that yields KEY=VALUE paths through a dictionary.
    Args:
        dictionary: Possibly nested dictionary of arbitrary keys and values.
    Yields:
        A single path through the dictionary.
    """
    for key, item in dictionary.items():
        if isinstance(item, dict):
            # Recursive case
            for value in _flatten_dict(item):
                yield f"{key}={value}"
        else:
            # Base case
            yield f"{key}={item}"


class PythonExtension(spack.package_base.PackageBase):
    maintainers("adamjstewart")

@@ -362,7 +378,7 @@ class PythonPipBuilder(BaseBuilder):
    legacy_long_methods = ("install_options", "global_options", "config_settings")

    #: Names associated with package attributes in the old build-system format
    legacy_attributes = ("build_directory", "install_time_test_callbacks")
    legacy_attributes = ("archive_files", "build_directory", "install_time_test_callbacks")

    #: Callback names for install-time test
    install_time_test_callbacks = ["test"]
@@ -407,14 +423,15 @@ def build_directory(self):
    def config_settings(self, spec, prefix):
        """Configuration settings to be passed to the PEP 517 build backend.

        Requires pip 22.1 or newer.
        Requires pip 22.1 or newer for keys that appear only a single time,
        or pip 23.1 or newer if the same key appears multiple times.

        Args:
            spec (spack.spec.Spec): build spec
            prefix (spack.util.prefix.Prefix): installation prefix

        Returns:
            dict: dictionary of KEY, VALUE settings
            dict: Possibly nested dictionary of KEY, VALUE settings
        """
        return {}
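
A package would override this hook to feed settings to its build backend; a hypothetical override (package and key names invented for illustration) that exercises the nested-dict support added in this diff:

```python
# Hypothetical override inside a package's builder class; keys are illustrative.
def config_settings(self, spec, prefix):
    return {
        "builddir": "build",
        # Nested value: flattened to "setup-args=-Dsystem-freetype=true".
        "setup-args": {"-Dsystem-freetype": "true"},
    }
```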

@@ -450,29 +467,28 @@ def global_options(self, spec, prefix):
    def install(self, pkg, spec, prefix):
        """Install everything from build directory."""

        args = PythonPipBuilder.std_args(pkg) + ["--prefix=" + prefix]

        for key, value in self.config_settings(spec, prefix).items():
            if spec["py-pip"].version < Version("22.1"):
                raise SpecError(
                    "'{}' package uses 'config_settings' which is only supported by "
                    "pip 22.1+. Add the following line to the package to fix this:\n\n"
                    '    depends_on("py-pip@22.1:", type="build")'.format(spec.name)
                )

            args.append("--config-settings={}={}".format(key, value))
        args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"]

        for setting in _flatten_dict(self.config_settings(spec, prefix)):
            args.append(f"--config-settings={setting}")
        for option in self.install_options(spec, prefix):
            args.append("--install-option=" + option)
            args.append(f"--install-option={option}")
        for option in self.global_options(spec, prefix):
            args.append("--global-option=" + option)
            args.append(f"--global-option={option}")

        if pkg.stage.archive_file and pkg.stage.archive_file.endswith(".whl"):
            args.append(pkg.stage.archive_file)
        else:
            args.append(".")

        pip = inspect.getmodule(pkg).pip
        pip = spec["python"].command
        # Hide user packages, since we don't have build isolation. This is
        # necessary because pip / setuptools may run hooks from arbitrary
        # packages during the build. There is no equivalent variable to hide
        # system packages, so this is not reliable for external Python.
        pip.add_default_env("PYTHONNOUSERSITE", "1")
        pip.add_default_arg("-m")
        pip.add_default_arg("pip")
        with fs.working_dir(self.build_directory):
            pip(*args)


@@ -64,7 +64,7 @@ class RacketBuilder(spack.builder.Builder):

    @property
    def subdirectory(self):
        if self.racket_name:
        if self.pkg.racket_name:
            return "pkgs/{0}".format(self.pkg.racket_name)
        return None


@@ -25,6 +25,7 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import memoized
from llnl.util.tty.color import cescape, colorize

import spack
import spack.binary_distribution as bindist
@@ -49,7 +50,11 @@

TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
# TODO: Remove this in Spack 0.23
SHARED_PR_MIRROR_URL = "s3://spack-binaries-prs/shared_pr_mirror"
JOB_NAME_FORMAT = (
    "{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{arch=architecture}"
)

spack_gpg = spack.main.SpackCommand("gpg")
spack_compiler = spack.main.SpackCommand("compiler")
@@ -69,48 +74,23 @@ def __exit__(self, exc_type, exc_value, exc_traceback):
        return False


def get_job_name(spec, osarch, build_group):
    """Given the necessary parts, format the gitlab job name
def get_job_name(spec: spack.spec.Spec, build_group: str = ""):
    """Given a spec and possibly a build group, return the job name. If the
    resulting name is longer than 255 characters, it will be truncated.

    Arguments:
        spec (spack.spec.Spec): Spec job will build
        osarch: Architecture TODO: (this is a spack.spec.ArchSpec,
            but sphinx doesn't recognize the type and fails).
        build_group (str): Name of build group this job belongs to (a CDash
            notion)

    Returns: The job name
    """
    item_idx = 0
    format_str = ""
    format_args = []

    format_str += "{{{0}}}".format(item_idx)
    format_args.append(spec.name)
    item_idx += 1

    format_str += "/{{{0}}}".format(item_idx)
    format_args.append(spec.dag_hash(7))
    item_idx += 1

    format_str += " {{{0}}}".format(item_idx)
    format_args.append(spec.version)
    item_idx += 1

    format_str += " {{{0}}}".format(item_idx)
    format_args.append(spec.compiler)
    item_idx += 1

    format_str += " {{{0}}}".format(item_idx)
    format_args.append(osarch)
    item_idx += 1
    job_name = spec.format(JOB_NAME_FORMAT)

    if build_group:
        format_str += " {{{0}}}".format(item_idx)
        format_args.append(build_group)
        item_idx += 1
        job_name = "{0} {1}".format(job_name, build_group)

    return format_str.format(*format_args)
    return job_name[:255]
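
The rewrite collapses the hand-rolled format-string building into one `Spec.format` call against `JOB_NAME_FORMAT`. Roughly, with invented values, the result looks like:

```python
# Illustration with invented values; each {...} token is expanded by Spec.format.
job_name = spec.format(JOB_NAME_FORMAT)
# e.g. "zlib@1.2.13 /abcdefg %gcc@12.2.0 arch=linux-ubuntu22.04-x86_64"
if build_group:
    job_name = f"{job_name} {build_group}"  # e.g. "... my-cdash-group"
print(job_name[:255])  # names longer than 255 characters are truncated
```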


def _remove_reserved_tags(tags):
@@ -118,15 +98,6 @@ def _remove_reserved_tags(tags):
    return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]


def _get_spec_string(spec):
    format_elements = ["{name}{@version}", "{%compiler}"]

    if spec.architecture:
        format_elements.append(" {arch=architecture}")

    return spec.format("".join(format_elements))


def _spec_deps_key(s):
    return "{0}/{1}".format(s.name, s.dag_hash(7))

@@ -231,22 +202,22 @@ def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisi

    tty.msg("Staging summary ([x] means a job needs rebuilding):")
    for stage_index, stage in enumerate(stages):
        tty.msg("  stage {0} ({1} jobs):".format(stage_index, len(stage)))
        tty.msg(f"  stage {stage_index} ({len(stage)} jobs):")

        for job in sorted(stage):
        for job in sorted(stage, key=lambda j: (not rebuild_decisions[j].rebuild, j)):
            s = spec_labels[job]
            rebuild = rebuild_decisions[job].rebuild
            reason = rebuild_decisions[job].reason
            reason_msg = " ({0})".format(reason) if reason else ""
            tty.msg(
                "    [{1}] {0} -> {2}{3}".format(
                    job, "x" if rebuild else " ", _get_spec_string(s), reason_msg
                )
            )
            if rebuild_decisions[job].mirrors:
                tty.msg("      found on the following mirrors:")
                for murl in rebuild_decisions[job].mirrors:
                    tty.msg("        {0}".format(murl))
            reason_msg = f" ({reason})" if reason else ""
            spec_fmt = "{name}{@version}{%compiler}{/hash:7}"
            if rebuild_decisions[job].rebuild:
                status = colorize("@*g{[x]}  ")
                msg = f"  {status}{s.cformat(spec_fmt)}{reason_msg}"
            else:
                msg = f"{s.format(spec_fmt)}{reason_msg}"
                if rebuild_decisions[job].mirrors:
                    msg += f" [{', '.join(rebuild_decisions[job].mirrors)}]"
                msg = colorize(f"  @K -   {cescape(msg)}@.")
            tty.msg(msg)


def _compute_spec_deps(spec_list):

@@ -337,7 +308,7 @@ def _spec_matches(spec, match_string):


def _format_job_needs(
    dep_jobs, osname, build_group, prune_dag, rebuild_decisions, enable_artifacts_buildcache
    dep_jobs, build_group, prune_dag, rebuild_decisions, enable_artifacts_buildcache
):
    needs_list = []
    for dep_job in dep_jobs:
@@ -347,7 +318,7 @@ def _format_job_needs(
        if not prune_dag or rebuild:
            needs_list.append(
                {
                    "job": get_job_name(dep_job, dep_job.architecture, build_group),
                    "job": get_job_name(dep_job, build_group),
                    "artifacts": enable_artifacts_buildcache,
                }
            )
@@ -700,7 +671,7 @@ def generate_gitlab_ci_yaml(
        remote_mirror_override (str): Typically only needed when one spack.yaml
            is used to populate several mirrors with binaries, based on some
            criteria. Spack protected pipelines populate different mirrors based
            on branch name, facilitated by this option.
            on branch name, facilitated by this option. DEPRECATED
    """
    with spack.concretize.disable_compiler_existence_check():
        with env.write_transaction():
@@ -797,17 +768,39 @@ def generate_gitlab_ci_yaml(
            "instead.",
        )

    if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
        tty.die("spack ci generate requires an env containing a mirror")
    pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
    deprecated_mirror_config = False
    buildcache_destination = None
    if "buildcache-destination" in pipeline_mirrors:
        if remote_mirror_override:
            tty.die(
                "Using the deprecated --buildcache-destination cli option and "
                "having a mirror named 'buildcache-destination' at the same time "
                "is not allowed"
            )
        buildcache_destination = pipeline_mirrors["buildcache-destination"]
    else:
        deprecated_mirror_config = True
        # TODO: This will be an error in Spack 0.23

    ci_mirrors = yaml_root["mirrors"]
    mirror_urls = [url for url in ci_mirrors.values()]
    remote_mirror_url = mirror_urls[0]
    # TODO: Remove this block in spack 0.23
    remote_mirror_url = None
    if deprecated_mirror_config:
        if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
            tty.die("spack ci generate requires an env containing a mirror")

        ci_mirrors = yaml_root["mirrors"]
        mirror_urls = [url for url in ci_mirrors.values()]
        remote_mirror_url = mirror_urls[0]

    spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None)
    if spack_buildcache_copy:
        buildcache_copies = {}
        buildcache_copy_src_prefix = remote_mirror_override or remote_mirror_url
        buildcache_copy_src_prefix = (
            buildcache_destination.fetch_url
            if buildcache_destination
            else remote_mirror_override or remote_mirror_url
        )
        buildcache_copy_dest_prefix = spack_buildcache_copy

    # Check for a list of "known broken" specs that we should not bother
@@ -819,6 +812,7 @@ def generate_gitlab_ci_yaml(

    enable_artifacts_buildcache = False
    if "enable-artifacts-buildcache" in ci_config:
        tty.warn("Support for enable-artifacts-buildcache will be removed in Spack 0.23")
        enable_artifacts_buildcache = ci_config["enable-artifacts-buildcache"]

    rebuild_index_enabled = True
@@ -827,13 +821,15 @@ def generate_gitlab_ci_yaml(

    temp_storage_url_prefix = None
    if "temporary-storage-url-prefix" in ci_config:
        tty.warn("Support for temporary-storage-url-prefix will be removed in Spack 0.23")
        temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]

    # If a remote mirror override (alternate buildcache destination) was
    # specified, add it here in case it has already built hashes we might
    # generate.
    # TODO: Remove this block in Spack 0.23
    mirrors_to_check = None
    if remote_mirror_override:
    if deprecated_mirror_config and remote_mirror_override:
        if spack_pipeline_type == "spack_protected_branch":
            # Overriding the main mirror in this case might result
            # in skipping jobs on a release pipeline because specs are
@@ -853,8 +849,9 @@ def generate_gitlab_ci_yaml(
            cfg.default_modify_scope(),
        )

    # TODO: Remove this block in Spack 0.23
    shared_pr_mirror = None
    if spack_pipeline_type == "spack_pull_request":
    if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request":
        stack_name = os.environ.get("SPACK_CI_STACK_NAME", "")
        shared_pr_mirror = url_util.join(SHARED_PR_MIRROR_URL, stack_name)
        spack.mirror.add(
@@ -906,6 +903,7 @@ def generate_gitlab_ci_yaml(
    job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
    job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
    job_test_dir = os.path.join(pipeline_artifacts_dir, "tests")
    # TODO: Remove this line in Spack 0.23
    local_mirror_dir = os.path.join(pipeline_artifacts_dir, "mirror")
    user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data")

@@ -920,13 +918,13 @@ def generate_gitlab_ci_yaml(
    rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
    rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir)
    rel_job_test_dir = os.path.relpath(job_test_dir, ci_project_dir)
    # TODO: Remove this line in Spack 0.23
    rel_local_mirror_dir = os.path.join(local_mirror_dir, ci_project_dir)
    rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir)

    # Speed up staging by first fetching binary indices from all mirrors
    # (including the override mirror we may have just added above).
    try:
        bindist.binary_index.update()
        bindist.BINARY_INDEX.update()
    except bindist.FetchCacheError as e:
        tty.warn(e)

@@ -1023,8 +1021,7 @@ def main_script_replacements(cmd):
            if "after_script" in job_object:
                job_object["after_script"] = _unpack_script(job_object["after_script"])

            osname = str(release_spec.architecture)
            job_name = get_job_name(release_spec, osname, build_group)
            job_name = get_job_name(release_spec, build_group)

            job_vars = job_object.setdefault("variables", {})
            job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
@@ -1051,7 +1048,6 @@ def main_script_replacements(cmd):
                job_object["needs"].extend(
                    _format_job_needs(
                        dep_jobs,
                        osname,
                        build_group,
                        prune_dag,
                        rebuild_decisions,
@@ -1137,6 +1133,7 @@ def main_script_replacements(cmd):
                },
            )

            # TODO: Remove this block in Spack 0.23
            if enable_artifacts_buildcache:
                bc_root = os.path.join(local_mirror_dir, "build_cache")
                job_object["artifacts"]["paths"].extend(
@@ -1166,10 +1163,12 @@ def main_script_replacements(cmd):
        _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions)

    # Clean up remote mirror override if enabled
    if remote_mirror_override:
        spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
    if spack_pipeline_type == "spack_pull_request":
        spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())
    # TODO: Remove this block in Spack 0.23
    if deprecated_mirror_config:
        if remote_mirror_override:
            spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
        if spack_pipeline_type == "spack_pull_request":
            spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())

    tty.debug("{0} build jobs generated in {1} stages".format(job_id, stage_id))

@@ -1200,10 +1199,28 @@ def main_script_replacements(cmd):
        sync_job["needs"] = [
            {"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
        ]

        if "variables" not in sync_job:
            sync_job["variables"] = {}

        sync_job["variables"]["SPACK_COPY_ONLY_DESTINATION"] = (
            buildcache_destination.fetch_url
            if buildcache_destination
            else remote_mirror_override or remote_mirror_url
        )

        if "buildcache-source" in pipeline_mirrors:
            buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url
        else:
            # TODO: Remove this condition in Spack 0.23
            buildcache_source = os.environ.get("SPACK_SOURCE_MIRROR", None)
        sync_job["variables"]["SPACK_BUILDCACHE_SOURCE"] = buildcache_source

        output_object["copy"] = sync_job
        job_id += 1

    if job_id > 0:
        # TODO: Remove this block in Spack 0.23
        if temp_storage_url_prefix:
            # There were some rebuild jobs scheduled, so we will need to
            # schedule a job to clean up the temporary storage location
@@ -1237,6 +1254,13 @@ def main_script_replacements(cmd):
            signing_job["when"] = "always"
            signing_job["retry"] = {"max": 2, "when": ["always"]}
            signing_job["interruptible"] = True
            if "variables" not in signing_job:
                signing_job["variables"] = {}
            signing_job["variables"]["SPACK_BUILDCACHE_DESTINATION"] = (
                buildcache_destination.push_url  # need the s3 url for aws s3 sync
                if buildcache_destination
                else remote_mirror_override or remote_mirror_url
            )

            output_object["sign-pkgs"] = signing_job

@@ -1245,13 +1269,13 @@ def main_script_replacements(cmd):
            stage_names.append("stage-rebuild-index")
            final_job = spack_ci_ir["jobs"]["reindex"]["attributes"]

            index_target_mirror = mirror_urls[0]
            if remote_mirror_override:
                index_target_mirror = remote_mirror_override
            final_job["stage"] = "stage-rebuild-index"
            target_mirror = remote_mirror_override or remote_mirror_url
            if buildcache_destination:
                target_mirror = buildcache_destination.push_url
            final_job["script"] = _unpack_script(
                final_job["script"],
                op=lambda cmd: cmd.replace("{index_target_mirror}", index_target_mirror),
                op=lambda cmd: cmd.replace("{index_target_mirror}", target_mirror),
            )

            final_job["when"] = "always"
@@ -1273,20 +1297,24 @@ def main_script_replacements(cmd):
        "SPACK_CONCRETE_ENV_DIR": rel_concrete_env_dir,
        "SPACK_VERSION": spack_version,
        "SPACK_CHECKOUT_VERSION": version_to_clone,
        # TODO: Remove this line in Spack 0.23
        "SPACK_REMOTE_MIRROR_URL": remote_mirror_url,
        "SPACK_JOB_LOG_DIR": rel_job_log_dir,
        "SPACK_JOB_REPRO_DIR": rel_job_repro_dir,
        "SPACK_JOB_TEST_DIR": rel_job_test_dir,
        # TODO: Remove this line in Spack 0.23
        "SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir,
        "SPACK_PIPELINE_TYPE": str(spack_pipeline_type),
        "SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
        # TODO: Remove this line in Spack 0.23
        "SPACK_CI_SHARED_PR_MIRROR_URL": shared_pr_mirror or "None",
        "SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag),
        "SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
        "SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
    }

    if remote_mirror_override:
    # TODO: Remove this block in Spack 0.23
    if deprecated_mirror_config and remote_mirror_override:
        (output_object["variables"]["SPACK_REMOTE_MIRROR_OVERRIDE"]) = remote_mirror_override

    spack_stack_name = os.environ.get("SPACK_CI_STACK_NAME", None)
@@ -2026,43 +2054,23 @@ def process_command(name, commands, repro_dir, run=True, exit_on_failure=True):


def create_buildcache(
    input_spec: spack.spec.Spec,
    *,
    pipeline_mirror_url: Optional[str] = None,
    buildcache_mirror_url: Optional[str] = None,
    sign_binaries: bool = False,
    input_spec: spack.spec.Spec, *, destination_mirror_urls: List[str], sign_binaries: bool = False
) -> List[PushResult]:
    """Create the buildcache at the provided mirror(s).

    Arguments:
        input_spec: Installed spec to package and push
        buildcache_mirror_url: URL for the buildcache mirror
        pipeline_mirror_url: URL for the pipeline mirror
        destination_mirror_urls: List of urls to push to
        sign_binaries: Whether or not to sign buildcache entry

    Returns: A list of PushResults, indicating success or failure.
    """
    results = []

    # Create buildcache in either the main remote mirror, or in the
    # per-PR mirror, if this is a PR pipeline
    if buildcache_mirror_url:
    for mirror_url in destination_mirror_urls:
        results.append(
            PushResult(
                success=push_mirror_contents(input_spec, buildcache_mirror_url, sign_binaries),
                url=buildcache_mirror_url,
            )
        )

    # Create another copy of that buildcache in the per-pipeline
    # temporary storage mirror (this is only done if either
    # artifacts buildcache is enabled or a temporary storage url
    # prefix is set)
    if pipeline_mirror_url:
        results.append(
            PushResult(
                success=push_mirror_contents(input_spec, pipeline_mirror_url, sign_binaries),
                url=pipeline_mirror_url,
                success=push_mirror_contents(input_spec, mirror_url, sign_binaries), url=mirror_url
            )
        )
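
A sketch of calling the new interface (the mirror URLs below are placeholders); note that per-mirror failures are reported through the returned `PushResult` list rather than raised:

```python
# Hypothetical call site; the URLs are placeholders.
results = create_buildcache(
    input_spec,
    destination_mirror_urls=["s3://example-mirror", "file:///tmp/local-mirror"],
    sign_binaries=False,
)
for result in results:
    status = "pushed" if result.success else "FAILED"
    print(f"{status}: {result.url}")
```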


@@ -2242,13 +2250,13 @@ def build_name(self):
                spec.architecture,
                self.build_group,
            )
            tty.verbose(
            tty.debug(
                "Generated CDash build name ({0}) from the {1}".format(build_name, spec.name)
            )
            return build_name

        build_name = os.environ.get("SPACK_CDASH_BUILD_NAME")
        tty.verbose("Using CDash build name ({0}) from the environment".format(build_name))
        tty.debug("Using CDash build name ({0}) from the environment".format(build_name))
        return build_name

    @property  # type: ignore
@@ -2262,11 +2270,11 @@ def build_stamp(self):
        Returns: (str) current CDash build stamp"""
        build_stamp = os.environ.get("SPACK_CDASH_BUILD_STAMP")
        if build_stamp:
            tty.verbose("Using build stamp ({0}) from the environment".format(build_stamp))
            tty.debug("Using build stamp ({0}) from the environment".format(build_stamp))
            return build_stamp

        build_stamp = cdash_build_stamp(self.build_group, time.time())
        tty.verbose("Generated new build stamp ({0})".format(build_stamp))
        tty.debug("Generated new build stamp ({0})".format(build_stamp))
        return build_stamp

    @property  # type: ignore

@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.cmd.common.env_utility as env_utility
from spack.context import Context

description = (
    "run a command in a spec's install environment, or dump its environment to screen or file"
@@ -14,4 +15,4 @@


def build_env(parser, args):
    env_utility.emulate_env_utility("build-env", "build", args)
    env_utility.emulate_env_utility("build-env", Context.BUILD, args)

@@ -3,16 +3,19 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import copy
import glob
import hashlib
import json
import multiprocessing.pool
import os
import shutil
import sys
import tempfile
from typing import List
import urllib.request
from typing import Dict, List, Optional, Tuple

import llnl.util.tty as tty
import llnl.util.tty.color as clr
from llnl.string import plural
from llnl.util.lang import elide_list

@@ -22,17 +25,37 @@
import spack.config
import spack.environment as ev
import spack.error
import spack.hash_types as ht
import spack.mirror
import spack.oci.oci
import spack.oci.opener
import spack.relocate
import spack.repo
import spack.spec
import spack.stage
import spack.store
import spack.user_environment
import spack.util.crypto
import spack.util.url as url_util
import spack.util.web as web_util
from spack.build_environment import determine_number_of_jobs
from spack.cmd import display_specs
from spack.oci.image import (
    Digest,
    ImageReference,
    default_config,
    default_index_tag,
    default_manifest,
    default_tag,
    tag_is_spec,
)
from spack.oci.oci import (
    copy_missing_layers_with_retry,
    get_manifest_and_config_with_retry,
    upload_blob_with_retry,
    upload_manifest_with_retry,
)
from spack.spec import Spec, save_dependency_specfiles
from spack.stage import Stage

description = "create, download and install binary packages"
section = "packaging"
@@ -58,7 +81,9 @@ def setup_parser(subparser: argparse.ArgumentParser):
    push_sign.add_argument(
        "--key", "-k", metavar="key", type=str, default=None, help="key for signing"
    )
    push.add_argument("mirror", type=str, help="mirror name, path, or URL")
    push.add_argument(
        "mirror", type=arguments.mirror_name_or_url, help="mirror name, path, or URL"
    )
    push.add_argument(
        "--update-index",
        "--rebuild-index",
@@ -84,7 +109,10 @@ def setup_parser(subparser: argparse.ArgumentParser):
        action="store_true",
        help="stop pushing on first failure (default is best effort)",
    )
    arguments.add_common_arguments(push, ["specs"])
    push.add_argument(
        "--base-image", default=None, help="specify the base image for the buildcache. "
    )
    arguments.add_common_arguments(push, ["specs", "jobs"])
    push.set_defaults(func=push_fn)

    install = subparsers.add_parser("install", help=install_fn.__doc__)
@@ -268,6 +296,21 @@ def _matching_specs(specs: List[Spec]) -> List[Spec]:
    return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs]


def _format_spec(spec: Spec) -> str:
    return spec.cformat("{name}{@version}{/hash:7}")


def _progress(i: int, total: int):
    if total > 1:
        digits = len(str(total))
        return f"[{i+1:{digits}}/{total}] "
    return ""


def _make_pool():
    return multiprocessing.pool.Pool(determine_number_of_jobs(parallel=True))


def push_fn(args):
    """create a binary package and push it to a mirror"""
    if args.spec_file:
@@ -281,63 +324,80 @@ def push_fn(args):
    else:
        specs = spack.cmd.require_active_env("buildcache push").all_specs()

    mirror = arguments.mirror_name_or_url(args.mirror)

    if args.allow_root:
        tty.warn(
            "The flag `--allow-root` is the default in Spack 0.21, will be removed in Spack 0.22"
        )

    url = mirror.push_url
    # Check if this is an OCI image.
    try:
        image_ref = spack.oci.oci.image_from_mirror(args.mirror)
    except ValueError:
        image_ref = None

    # For OCI images, we require dependencies to be pushed for now.
    if image_ref:
        if "dependencies" not in args.things_to_install:
            tty.die("Dependencies must be pushed for OCI images.")
        if not args.unsigned:
            tty.warn(
                "Code signing is currently not supported for OCI images. "
                "Use --unsigned to silence this warning."
            )

    # This is a list of installed, non-external specs.
    specs = bindist.specs_to_be_packaged(
        specs,
        root="package" in args.things_to_install,
        dependencies="dependencies" in args.things_to_install,
    )

    url = args.mirror.push_url

    # When pushing multiple specs, print the url once ahead of time, as well as how
    # many specs are being pushed.
    if len(specs) > 1:
        tty.info(f"Selected {len(specs)} specs to push to {url}")

    skipped = []
    failed = []

    # tty printing
    color = clr.get_color_when()
    format_spec = lambda s: s.format("{name}{@version}{/hash:7}", color=color)
    total_specs = len(specs)
    digits = len(str(total_specs))
    # TODO: unify this logic in the future.
    if image_ref:
        with tempfile.TemporaryDirectory(
            dir=spack.stage.get_stage_root()
        ) as tmpdir, _make_pool() as pool:
            skipped = _push_oci(args, image_ref, specs, tmpdir, pool)
    else:
        skipped = []

    for i, spec in enumerate(specs):
        try:
            bindist.push_or_raise(
                spec,
                url,
                bindist.PushOptions(
                    force=args.force,
                    unsigned=args.unsigned,
                    key=args.key,
                    regenerate_index=args.update_index,
                ),
            )
        for i, spec in enumerate(specs):
            try:
                bindist.push_or_raise(
                    spec,
                    url,
                    bindist.PushOptions(
                        force=args.force,
                        unsigned=args.unsigned,
                        key=args.key,
                        regenerate_index=args.update_index,
                    ),
                )

            if total_specs > 1:
                msg = f"[{i+1:{digits}}/{total_specs}] Pushed {format_spec(spec)}"
            else:
                msg = f"Pushed {format_spec(spec)} to {url}"
                msg = f"{_progress(i, len(specs))}Pushed {_format_spec(spec)}"
                if len(specs) == 1:
                    msg += f" to {url}"
                tty.info(msg)

            tty.info(msg)
            except bindist.NoOverwriteException:
                skipped.append(_format_spec(spec))

        except bindist.NoOverwriteException:
            skipped.append(format_spec(spec))

        # Catch any other exception unless the fail fast option is set
        except Exception as e:
            if args.fail_fast or isinstance(e, (bindist.PickKeyException, bindist.NoKeyException)):
                raise
            failed.append((format_spec(spec), e))
            # Catch any other exception unless the fail fast option is set
            except Exception as e:
                if args.fail_fast or isinstance(
                    e, (bindist.PickKeyException, bindist.NoKeyException)
                ):
                    raise
                failed.append((_format_spec(spec), e))

    if skipped:
        if len(specs) == 1:
@@ -364,6 +424,341 @@ def push_fn(args):
            ),
        )

    # Update the index if requested
    # TODO: move update-index logic out of bindist; it should run once after all
    # specs are pushed, not once per spec.
    if image_ref and len(skipped) < len(specs) and args.update_index:
        with tempfile.TemporaryDirectory(
            dir=spack.stage.get_stage_root()
        ) as tmpdir, _make_pool() as pool:
            _update_index_oci(image_ref, tmpdir, pool)


def _get_spack_binary_blob(image_ref: ImageReference) -> Optional[spack.oci.oci.Blob]:
    """Get the spack tarball layer digest and size, if the layer exists"""
    try:
        manifest, config = get_manifest_and_config_with_retry(image_ref)

        return spack.oci.oci.Blob(
            compressed_digest=Digest.from_string(manifest["layers"][-1]["digest"]),
            uncompressed_digest=Digest.from_string(config["rootfs"]["diff_ids"][-1]),
            size=manifest["layers"][-1]["size"],
        )
    except Exception:
        return None
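For reference, a hedged sketch of the two documents this helper inspects; the field names follow the OCI image spec as used in the manifests built below, and the digest values are hypothetical:

# Trimmed shapes of the fetched documents (hypothetical digests):
manifest = {"layers": [{"digest": "sha256:" + "a" * 64, "size": 1234}]}
config = {"rootfs": {"diff_ids": ["sha256:" + "b" * 64]}}
# The last layer is taken to be the spack tarball: its compressed digest and
# size come from the manifest, its uncompressed digest from the last diff_id.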


def _push_single_spack_binary_blob(image_ref: ImageReference, spec: spack.spec.Spec, tmpdir: str):
    filename = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz")

    # Create an oci.image.layer aka tarball of the package
    compressed_tarfile_checksum, tarfile_checksum = spack.oci.oci.create_tarball(spec, filename)

    blob = spack.oci.oci.Blob(
        Digest.from_sha256(compressed_tarfile_checksum),
        Digest.from_sha256(tarfile_checksum),
        os.path.getsize(filename),
    )

    # Upload the blob
    upload_blob_with_retry(image_ref, file=filename, digest=blob.compressed_digest)

    # delete the file
    os.unlink(filename)

    return blob
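A small sketch of the `Digest` round-trip used here; the `sha256:<hex>` string form is an assumption based on how digests are rendered into the manifests below:

import hashlib

hex_sum = hashlib.sha256(b"layer contents").hexdigest()
d1 = Digest.from_sha256(hex_sum)              # from a bare hex checksum
d2 = Digest.from_string(f"sha256:{hex_sum}")  # from an "algorithm:hex" string
# Both should render identically when embedded in a manifest:
assert str(d1) == str(d2)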


def _retrieve_env_dict_from_config(config: dict) -> dict:
    """Retrieve the environment variables from the image config file.
    Sets a default value for PATH if it is not present.

    Args:
        config (dict): The image config file.

    Returns:
        dict: The environment variables.
    """
    env = {"PATH": "/bin:/usr/bin"}

    if "Env" in config.get("config", {}):
        for entry in config["config"]["Env"]:
            key, value = entry.split("=", 1)
            env[key] = value
    return env
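Worked example (values hypothetical): a config carrying two variables overrides the PATH default and keeps everything else:

cfg = {"config": {"Env": ["PATH=/usr/local/bin:/usr/bin", "LANG=C.UTF-8"]}}
assert _retrieve_env_dict_from_config(cfg) == {
    "PATH": "/usr/local/bin:/usr/bin",  # replaces the /bin:/usr/bin default
    "LANG": "C.UTF-8",
}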


def _archspec_to_gooarch(spec: spack.spec.Spec) -> str:
    name = spec.target.family.name
    name_map = {"aarch64": "arm64", "x86_64": "amd64"}
    return name_map.get(name, name)
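Spot checks of the mapping (targets hypothetical): archspec family names are translated to Go/OCI architecture names, and unknown families pass through unchanged:

name_map = {"aarch64": "arm64", "x86_64": "amd64"}
assert name_map.get("x86_64", "x86_64") == "amd64"
assert name_map.get("ppc64le", "ppc64le") == "ppc64le"  # pass-through default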


def _put_manifest(
    base_images: Dict[str, Tuple[dict, dict]],
    checksums: Dict[str, spack.oci.oci.Blob],
    spec: spack.spec.Spec,
    image_ref: ImageReference,
    tmpdir: str,
):
    architecture = _archspec_to_gooarch(spec)

    dependencies = list(
        reversed(
            list(
                s
                for s in spec.traverse(order="topo", deptype=("link", "run"), root=True)
                if not s.external
            )
        )
    )

    base_manifest, base_config = base_images[architecture]
    env = _retrieve_env_dict_from_config(base_config)

    spack.user_environment.environment_modifications_for_specs(spec).apply_modifications(env)

    # Create an oci.image.config file
    config = copy.deepcopy(base_config)

    # Add the diff ids of the dependencies
    for s in dependencies:
        config["rootfs"]["diff_ids"].append(str(checksums[s.dag_hash()].uncompressed_digest))

    # Set the environment variables
    config["config"]["Env"] = [f"{k}={v}" for k, v in env.items()]

    # From the OCI v1.0 spec:
    # > Any extra fields in the Image JSON struct are considered implementation
    # > specific and MUST be ignored by any implementations which are unable to
    # > interpret them.
    # We use this to store the Spack spec, so we can use it to create an index.
    spec_dict = spec.to_dict(hash=ht.dag_hash)
    spec_dict["buildcache_layout_version"] = 1
    spec_dict["binary_cache_checksum"] = {
        "hash_algorithm": "sha256",
        "hash": checksums[spec.dag_hash()].compressed_digest.digest,
    }
    config.update(spec_dict)

    config_file = os.path.join(tmpdir, f"{spec.dag_hash()}.config.json")

    with open(config_file, "w") as f:
        json.dump(config, f, separators=(",", ":"))

    config_file_checksum = Digest.from_sha256(
        spack.util.crypto.checksum(hashlib.sha256, config_file)
    )

    # Upload the config file
    upload_blob_with_retry(image_ref, file=config_file, digest=config_file_checksum)

    oci_manifest = {
        "mediaType": "application/vnd.oci.image.manifest.v1+json",
        "schemaVersion": 2,
        "config": {
            "mediaType": base_manifest["config"]["mediaType"],
            "digest": str(config_file_checksum),
            "size": os.path.getsize(config_file),
        },
        "layers": [
            *(layer for layer in base_manifest["layers"]),
            *(
                {
                    "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
                    "digest": str(checksums[s.dag_hash()].compressed_digest),
                    "size": checksums[s.dag_hash()].size,
                }
                for s in dependencies
            ),
        ],
        "annotations": {"org.opencontainers.image.description": spec.format()},
    }

    image_ref_for_spec = image_ref.with_tag(default_tag(spec))

    # Finally upload the manifest
    upload_manifest_with_retry(image_ref_for_spec, oci_manifest=oci_manifest)

    # delete the config file
    os.unlink(config_file)

    return image_ref_for_spec
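Because the spec dict is merged into the image config (permitted by the OCI extra-fields rule quoted above), a consumer can recover the spec from the config blob alone; a minimal sketch mirroring what `_update_index_oci` does further down:

# Hedged sketch: read the spec back out of the config pushed for a spec's tag.
_, cfg = get_manifest_and_config_with_retry(image_ref.with_tag(default_tag(spec)))
if "spec" in cfg:  # the merged-in Spec.to_dict payload
    assert Spec.from_dict(cfg).dag_hash() == spec.dag_hash()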


def _push_oci(
    args,
    image_ref: ImageReference,
    installed_specs_with_deps: List[Spec],
    tmpdir: str,
    pool: multiprocessing.pool.Pool,
) -> List[str]:
    """Push specs to an OCI registry

    Args:
        args: The command line arguments.
        image_ref: The image reference.
        installed_specs_with_deps: The installed specs to push, excluding externals,
            including deps, ordered from roots to leaves.

    Returns:
        List[str]: The list of skipped specs (already in the buildcache).
    """

    # Reverse the order
    installed_specs_with_deps = list(reversed(installed_specs_with_deps))

    # The base image to use for the package. When not set, we use
    # the OCI registry only for storage, and do not use any base image.
    base_image_ref: Optional[ImageReference] = (
        ImageReference.from_string(args.base_image) if args.base_image else None
    )

    # Spec dag hash -> blob
    checksums: Dict[str, spack.oci.oci.Blob] = {}

    # arch -> (manifest, config)
    base_images: Dict[str, Tuple[dict, dict]] = {}

    # Specs not uploaded because they already exist
    skipped = []

    if not args.force:
        tty.info("Checking for existing specs in the buildcache")
        to_be_uploaded = []

        tags_to_check = (image_ref.with_tag(default_tag(s)) for s in installed_specs_with_deps)
        available_blobs = pool.map(_get_spack_binary_blob, tags_to_check)

        for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs):
            if maybe_blob is not None:
                checksums[spec.dag_hash()] = maybe_blob
                skipped.append(_format_spec(spec))
            else:
                to_be_uploaded.append(spec)
    else:
        to_be_uploaded = installed_specs_with_deps

    if not to_be_uploaded:
        return skipped

    tty.info(
        f"{len(to_be_uploaded)} specs need to be pushed to {image_ref.domain}/{image_ref.name}"
    )

    # Upload blobs
    new_blobs = pool.starmap(
        _push_single_spack_binary_blob, ((image_ref, spec, tmpdir) for spec in to_be_uploaded)
    )

    # And update the spec to blob mapping
    for spec, blob in zip(to_be_uploaded, new_blobs):
        checksums[spec.dag_hash()] = blob

    # Copy base image layers, probably fine to do sequentially.
    for spec in to_be_uploaded:
        architecture = _archspec_to_gooarch(spec)
        # Get base image details, if we don't have them yet
        if architecture in base_images:
            continue
        if base_image_ref is None:
            base_images[architecture] = (default_manifest(), default_config(architecture, "linux"))
        else:
            base_images[architecture] = copy_missing_layers_with_retry(
                base_image_ref, image_ref, architecture
            )

    # Upload manifests
    tty.info("Uploading manifests")
    pushed_image_ref = pool.starmap(
        _put_manifest,
        ((base_images, checksums, spec, image_ref, tmpdir) for spec in to_be_uploaded),
    )

    # Print the image names of the top-level specs
    for spec, ref in zip(to_be_uploaded, pushed_image_ref):
        tty.info(f"Pushed {_format_spec(spec)} to {ref}")

    return skipped
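The roots-to-leaves input is reversed so that every dependency's blob is present in `checksums` before any dependent's manifest references it; a tiny ordering illustration (specs hypothetical):

# Hypothetical DAG app -> libfoo -> zlib, listed roots-first per the docstring.
roots_first = ["app", "libfoo", "zlib"]
upload_order = list(reversed(roots_first))  # ["zlib", "libfoo", "app"]
# By the time "app" is uploaded, checksums for zlib and libfoo already exist.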


def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:
    # Don't allow recursion here, since Spack itself always uploads
    # vnd.oci.image.manifest.v1+json, not vnd.oci.image.index.v1+json
    _, config = get_manifest_and_config_with_retry(image_ref.with_tag(tag), tag, recurse=0)

    # Do very basic validation: if "spec" is a key in the config, it
    # must be a Spec object too.
    return config if "spec" in config else None


def _update_index_oci(
    image_ref: ImageReference, tmpdir: str, pool: multiprocessing.pool.Pool
) -> None:
    response = spack.oci.opener.urlopen(urllib.request.Request(url=image_ref.tags_url()))
    spack.oci.opener.ensure_status(response, 200)
    tags = json.load(response)["tags"]

    # Fetch all image config files in parallel
    spec_dicts = pool.starmap(
        _config_from_tag, ((image_ref, tag) for tag in tags if tag_is_spec(tag))
    )

    # Populate the database
    db_root_dir = os.path.join(tmpdir, "db_root")
    db = bindist.BuildCacheDatabase(db_root_dir)

    for spec_dict in spec_dicts:
        spec = Spec.from_dict(spec_dict)
        db.add(spec, directory_layout=None)
        db.mark(spec, "in_buildcache", True)

    # Create the index.json file
    index_json_path = os.path.join(tmpdir, "index.json")
    with open(index_json_path, "w") as f:
        db._write_to_file(f)

    # Create an empty config.json file
    empty_config_json_path = os.path.join(tmpdir, "config.json")
    with open(empty_config_json_path, "wb") as f:
        f.write(b"{}")

    # Upload the index.json file
    index_shasum = Digest.from_sha256(spack.util.crypto.checksum(hashlib.sha256, index_json_path))
    upload_blob_with_retry(image_ref, file=index_json_path, digest=index_shasum)

    # Upload the config.json file
    empty_config_digest = Digest.from_sha256(
        spack.util.crypto.checksum(hashlib.sha256, empty_config_json_path)
    )
    upload_blob_with_retry(image_ref, file=empty_config_json_path, digest=empty_config_digest)

    # Push a manifest file that references the index.json file as a layer.
    # Notice that we push this as if it is an image, which it of course is not.
    # When the ORAS spec becomes official, we can use that instead of a fake image.
    # For now we just use the OCI image spec, so that we don't run into issues with
    # automatic garbage collection of blobs that are not referenced by any image manifest.
    oci_manifest = {
        "mediaType": "application/vnd.oci.image.manifest.v1+json",
        "schemaVersion": 2,
        # Config is just an empty {} file for now, and irrelevant
        "config": {
            "mediaType": "application/vnd.oci.image.config.v1+json",
            "digest": str(empty_config_digest),
            "size": os.path.getsize(empty_config_json_path),
        },
        # The buildcache index is the only layer; it is not a tarball, so we lie
        # about the media type here.
        "layers": [
            {
                "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
                "digest": str(index_shasum),
                "size": os.path.getsize(index_json_path),
            }
        ],
    }

    upload_manifest_with_retry(image_ref.with_tag(default_index_tag), oci_manifest)
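On the consuming side the index comes back the same way it went up: pull the manifest for the index tag and fetch its single layer. A hedged sketch; `fetch_blob` is hypothetical, standing in for any registry client that can GET a blob by digest:

manifest, _ = get_manifest_and_config_with_retry(image_ref.with_tag(default_index_tag))
index_digest = Digest.from_string(manifest["layers"][0]["digest"])
index = json.loads(fetch_blob(image_ref, index_digest))  # the buildcache index.json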


def install_fn(args):
    """install from a binary package"""
@@ -414,7 +809,7 @@ def preview_fn(args):
    )


def check_fn(args):
def check_fn(args: argparse.Namespace):
    """check specs against remote binary mirror(s) to see if any need to be rebuilt

    this command uses the process exit code to indicate its result, specifically, if the
@@ -429,7 +824,7 @@ def check_fn(args):
    specs = spack.cmd.parse_specs(args.spec or args.spec_file)

    if specs:
        specs = _matching_specs(specs, specs)
        specs = _matching_specs(specs)
    else:
        specs = spack.cmd.require_active_env("buildcache check").all_specs()

@@ -522,7 +917,7 @@ def copy_buildcache_file(src_url, dest_url, local_path=None):
        local_path = os.path.join(tmpdir, os.path.basename(src_url))

    try:
        temp_stage = Stage(src_url, path=os.path.dirname(local_path))
        temp_stage = spack.stage.Stage(src_url, path=os.path.dirname(local_path))
        try:
            temp_stage.create()
            temp_stage.fetch()
@@ -616,6 +1011,20 @@ def manifest_copy(manifest_file_list):


def update_index(mirror: spack.mirror.Mirror, update_keys=False):
    # Special case OCI images for now.
    try:
        image_ref = spack.oci.oci.image_from_mirror(mirror)
    except ValueError:
        image_ref = None

    if image_ref:
        with tempfile.TemporaryDirectory(
            dir=spack.stage.get_stage_root()
        ) as tmpdir, _make_pool() as pool:
            _update_index_oci(image_ref, tmpdir, pool)
        return

    # Otherwise, assume a normal mirror.
    url = mirror.push_url

    bindist.generate_package_index(url_util.join(url, bindist.build_cache_relative_path()))

@@ -3,10 +3,10 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import argparse
import re
import sys

import llnl.string
import llnl.util.lang
from llnl.util import tty

@@ -15,11 +15,11 @@
import spack.spec
import spack.stage
import spack.util.crypto
import spack.util.web as web_util
from spack.cmd.common import arguments
from spack.package_base import PackageBase, deprecated_version, preferred_version
from spack.util.editor import editor
from spack.util.format import get_version_lines
from spack.util.naming import valid_fully_qualified_module_name
from spack.version import Version

description = "checksum available versions of a package"
@@ -35,30 +35,30 @@ def setup_parser(subparser):
        help="don't clean up staging area when command completes",
    )
    subparser.add_argument(
        "-b",
        "--batch",
        "-b",
        action="store_true",
        default=False,
        help="don't ask which versions to checksum",
    )
    subparser.add_argument(
        "-l",
        "--latest",
        "-l",
        action="store_true",
        default=False,
        help="checksum the latest available version",
    )
    subparser.add_argument(
        "-p",
        "--preferred",
        "-p",
        action="store_true",
        default=False,
        help="checksum the known Spack preferred version",
    )
    modes_parser = subparser.add_mutually_exclusive_group()
    modes_parser.add_argument(
        "-a",
        "--add-to-package",
        "-a",
        action="store_true",
        default=False,
        help="add new versions to package",
@@ -66,27 +66,26 @@ def setup_parser(subparser):
    modes_parser.add_argument(
        "--verify", action="store_true", default=False, help="verify known package checksums"
    )
    arguments.add_common_arguments(subparser, ["package", "jobs"])
    subparser.add_argument("package", help="name or spec (e.g. `cmake` or `cmake@3.18`)")
    subparser.add_argument(
        "versions", nargs=argparse.REMAINDER, help="versions to generate checksums for"
        "versions",
        nargs="*",
        help="checksum these specific versions (if omitted, Spack searches for remote versions)",
    )
    arguments.add_common_arguments(subparser, ["jobs"])
    subparser.epilog = (
        "examples:\n"
        " `spack checksum zlib@1.2` autodetects versions 1.2.0 to 1.2.13 from the remote\n"
        " `spack checksum zlib 1.2.13` checksums exact version 1.2.13 directly without search\n"
    )


def checksum(parser, args):
    # Did the user pass 'package@version' string?
    if len(args.versions) == 0 and "@" in args.package:
        args.versions = [args.package.split("@")[1]]
        args.package = args.package.split("@")[0]

    # Make sure the user provided a package and not a URL
    if not valid_fully_qualified_module_name(args.package):
        tty.die("`spack checksum` accepts package names, not URLs.")
    spec = spack.spec.Spec(args.package)

    # Get the package we're going to generate checksums for
    pkg_cls = spack.repo.PATH.get_pkg_class(args.package)
    pkg = pkg_cls(spack.spec.Spec(args.package))
    pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)

    # Build a list of versions to checksum
    versions = [Version(v) for v in args.versions]

    # Define placeholder for remote versions.
@@ -128,18 +127,41 @@ def checksum(parser, args):
        remote_versions = pkg.fetch_remote_versions(args.jobs)
        url_dict = remote_versions

    # A spidered URL can differ from the package.py *computed* URL, pointing to different tarballs.
    # For example, GitHub release pages sometimes have multiple tarballs with different shasum:
    # - releases/download/1.0/<pkg>-1.0.tar.gz (uploaded tarball)
    # - archive/refs/tags/1.0.tar.gz (generated tarball)
    # We want to ensure that `spack checksum` and `spack install` ultimately use the same URL,
    # so here we check whether the crawled and computed URLs disagree and, if so, prefer the
    # computed URL when it exists (verified with just a HEAD request).
    url_changed_for_version = set()
    for version, url in url_dict.items():
        possible_urls = pkg.all_urls_for_version(version)
        if url not in possible_urls:
            for possible_url in possible_urls:
                if web_util.url_exists(possible_url):
                    url_dict[version] = possible_url
                    break
            else:
                url_changed_for_version.add(version)

    if not url_dict:
        tty.die(f"Could not find any remote versions for {pkg.name}")

    # print an empty line to create a new output section block
    print()
    elif len(url_dict) > 1 and not args.batch and sys.stdin.isatty():
        filtered_url_dict = spack.stage.interactive_version_filter(
            url_dict,
            pkg.versions,
            url_changes=url_changed_for_version,
            initial_verion_filter=spec.versions,
        )
        if not filtered_url_dict:
            exit(0)
        url_dict = filtered_url_dict
    else:
        tty.info(f"Found {llnl.string.plural(len(url_dict), 'version')} of {pkg.name}")

    version_hashes = spack.stage.get_checksums_for_versions(
        url_dict,
        pkg.name,
        keep_stage=args.keep_stage,
        batch=(args.batch or len(versions) > 0 or len(url_dict) == 1),
        fetch_options=pkg.fetch_options,
        url_dict, pkg.name, keep_stage=args.keep_stage, fetch_options=pkg.fetch_options
    )

    if args.verify:

@@ -191,6 +191,14 @@ def ci_generate(args):
    """
    env = spack.cmd.require_active_env(cmd_name="ci generate")

    if args.copy_to:
        tty.warn("The flag --copy-to is deprecated and will be removed in Spack 0.23")

    if args.buildcache_destination:
        tty.warn(
            "The flag --buildcache-destination is deprecated and will be removed in Spack 0.23"
        )

    output_file = args.output_file
    copy_yaml_to = args.copy_to
    run_optimizer = args.optimize
@@ -264,12 +272,6 @@ def ci_rebuild(args):
    if not ci_config:
        tty.die("spack ci rebuild requires an env containing ci cfg")

    tty.msg(
        "SPACK_BUILDCACHE_DESTINATION={0}".format(
            os.environ.get("SPACK_BUILDCACHE_DESTINATION", None)
        )
    )

    # Grab the environment variables we need. These either come from the
    # pipeline generation step ("spack ci generate"), where they were written
    # out as variables, or else provided by GitLab itself.
@@ -277,6 +279,7 @@ def ci_rebuild(args):
    job_log_dir = os.environ.get("SPACK_JOB_LOG_DIR")
    job_test_dir = os.environ.get("SPACK_JOB_TEST_DIR")
    repro_dir = os.environ.get("SPACK_JOB_REPRO_DIR")
    # TODO: Remove this in Spack 0.23
    local_mirror_dir = os.environ.get("SPACK_LOCAL_MIRROR_DIR")
    concrete_env_dir = os.environ.get("SPACK_CONCRETE_ENV_DIR")
    ci_pipeline_id = os.environ.get("CI_PIPELINE_ID")
@@ -285,9 +288,12 @@ def ci_rebuild(args):
    job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME")
    job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH")
    spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE")
    # TODO: Remove this in Spack 0.23
    remote_mirror_override = os.environ.get("SPACK_REMOTE_MIRROR_OVERRIDE")
    # TODO: Remove this in Spack 0.23
    remote_mirror_url = os.environ.get("SPACK_REMOTE_MIRROR_URL")
    spack_ci_stack_name = os.environ.get("SPACK_CI_STACK_NAME")
    # TODO: Remove this in Spack 0.23
    shared_pr_mirror_url = os.environ.get("SPACK_CI_SHARED_PR_MIRROR_URL")
    rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING")
    require_signing = os.environ.get("SPACK_REQUIRE_SIGNING")
@@ -344,21 +350,36 @@ def ci_rebuild(args):

    full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False

    pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
    deprecated_mirror_config = False
    buildcache_destination = None
    if "buildcache-destination" in pipeline_mirrors:
        buildcache_destination = pipeline_mirrors["buildcache-destination"]
    else:
        deprecated_mirror_config = True
        # TODO: This will be an error in Spack 0.23

    # If no override url exists, then just push binary package to the
    # normal remote mirror url.
    # TODO: Remove in Spack 0.23
    buildcache_mirror_url = remote_mirror_override or remote_mirror_url
    if buildcache_destination:
        buildcache_mirror_url = buildcache_destination.push_url

    # Figure out what is our temporary storage mirror: Is it artifacts
    # buildcache? Or temporary-storage-url-prefix? In some cases we need to
    # force something or pipelines might not have a way to propagate build
    # artifacts from upstream to downstream jobs.
    # TODO: Remove this in Spack 0.23
    pipeline_mirror_url = None

    # TODO: Remove this in Spack 0.23
    temp_storage_url_prefix = None
    if "temporary-storage-url-prefix" in ci_config:
        temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
        pipeline_mirror_url = url_util.join(temp_storage_url_prefix, ci_pipeline_id)

    # TODO: Remove this in Spack 0.23
    enable_artifacts_mirror = False
    if "enable-artifacts-buildcache" in ci_config:
        enable_artifacts_mirror = ci_config["enable-artifacts-buildcache"]
@@ -454,12 +475,14 @@ def ci_rebuild(args):
    # If we decided there should be a temporary storage mechanism, add that
    # mirror now so it's used when we check for a hash match already
    # built for this spec.
    # TODO: Remove this block in Spack 0.23
    if pipeline_mirror_url:
        mirror = spack.mirror.Mirror(pipeline_mirror_url, name=spack_ci.TEMP_STORAGE_MIRROR_NAME)
        spack.mirror.add(mirror, cfg.default_modify_scope())
        pipeline_mirrors.append(pipeline_mirror_url)

    # Check configured mirrors for a built spec with a matching hash
    # TODO: Remove this block in Spack 0.23
    mirrors_to_check = None
    if remote_mirror_override:
        if spack_pipeline_type == "spack_protected_branch":
@@ -477,7 +500,8 @@ def ci_rebuild(args):
        )
        pipeline_mirrors.append(remote_mirror_override)

    if spack_pipeline_type == "spack_pull_request":
    # TODO: Remove this in Spack 0.23
    if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request":
        if shared_pr_mirror_url != "None":
            pipeline_mirrors.append(shared_pr_mirror_url)

@@ -499,6 +523,7 @@ def ci_rebuild(args):
        tty.msg("No need to rebuild {0}, found hash match at: ".format(job_spec_pkg_name))
        for match in matches:
            tty.msg(" {0}".format(match["mirror_url"]))
        # TODO: Remove this block in Spack 0.23
        if enable_artifacts_mirror:
            matching_mirror = matches[0]["mirror_url"]
            build_cache_dir = os.path.join(local_mirror_dir, "build_cache")
@@ -513,7 +538,8 @@ def ci_rebuild(args):
    # only want to keep the mirror being used by the current pipeline as its binary
    # package destination. This ensures that when we rebuild everything, we only
    # consume binary dependencies built in this pipeline.
    if full_rebuild:
    # TODO: Remove this in Spack 0.23
    if deprecated_mirror_config and full_rebuild:
        spack_ci.remove_other_mirrors(pipeline_mirrors, cfg.default_modify_scope())

    # No hash match anywhere means we need to rebuild spec
@@ -579,7 +605,9 @@ def ci_rebuild(args):
            "SPACK_COLOR=always",
            "SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
            "-j$(nproc)",
            "install-deps/{}".format(job_spec.format("{name}-{version}-{hash}")),
            "install-deps/{}".format(
                ev.depfile.MakefileSpec(job_spec).safe_format("{name}-{version}-{hash}")
            ),
        ],
        spack_cmd + ["install"] + root_install_args,
    ]
@@ -676,21 +704,25 @@ def ci_rebuild(args):
    # print out some instructions on how to reproduce this build failure
    # outside of the pipeline environment.
    if install_exit_code == 0:
        if buildcache_mirror_url or pipeline_mirror_url:
            for result in spack_ci.create_buildcache(
                input_spec=job_spec,
                buildcache_mirror_url=buildcache_mirror_url,
                pipeline_mirror_url=pipeline_mirror_url,
                sign_binaries=spack_ci.can_sign_binaries(),
            ):
                msg = tty.msg if result.success else tty.warn
                msg(
                    "{} {} to {}".format(
                        "Pushed" if result.success else "Failed to push",
                        job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when()),
                        result.url,
                    )
        mirror_urls = [buildcache_mirror_url]

        # TODO: Remove this block in Spack 0.23
        if pipeline_mirror_url:
            mirror_urls.append(pipeline_mirror_url)

        for result in spack_ci.create_buildcache(
            input_spec=job_spec,
            destination_mirror_urls=mirror_urls,
            sign_binaries=spack_ci.can_sign_binaries(),
        ):
            msg = tty.msg if result.success else tty.warn
            msg(
                "{} {} to {}".format(
                    "Pushed" if result.success else "Failed to push",
                    job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when()),
                    result.url,
                )
            )

    # If this is a develop pipeline, check if the spec that we just built is
    # on the broken-specs list. If so, remove it.

@@ -543,7 +543,7 @@ def add_concretizer_args(subparser):
    )


def add_s3_connection_args(subparser, add_help):
def add_connection_args(subparser, add_help):
    subparser.add_argument(
        "--s3-access-key-id", help="ID string to use to connect to this S3 mirror"
    )
@@ -559,6 +559,8 @@ def add_s3_connection_args(subparser, add_help):
    subparser.add_argument(
        "--s3-endpoint-url", help="endpoint URL to use to connect to this S3 mirror"
    )
    subparser.add_argument("--oci-username", help="username to use to connect to this OCI mirror")
    subparser.add_argument("--oci-password", help="password to use to connect to this OCI mirror")


def use_buildcache(cli_arg_value):

@@ -7,7 +7,6 @@

import llnl.util.tty as tty

import spack.build_environment as build_environment
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.deptypes as dt
@@ -15,7 +14,8 @@
import spack.paths
import spack.spec
import spack.store
from spack import traverse
from spack import build_environment, traverse
from spack.context import Context
from spack.util.environment import dump_environment, pickle_environment


@@ -42,14 +42,14 @@ def setup_parser(subparser):


class AreDepsInstalledVisitor:
    def __init__(self, context="build"):
        if context not in ("build", "test"):
            raise ValueError("context can only be build or test")

        if context == "build":
    def __init__(self, context: Context = Context.BUILD):
        if context == Context.BUILD:
            # TODO: run deps shouldn't be required for build env.
            self.direct_deps = dt.BUILD | dt.LINK | dt.RUN
        else:
        elif context == Context.TEST:
            self.direct_deps = dt.BUILD | dt.TEST | dt.LINK | dt.RUN
        else:
            raise ValueError("context can only be Context.BUILD or Context.TEST")

        self.has_uninstalled_deps = False

@@ -76,7 +76,7 @@ def neighbors(self, item):
        return item.edge.spec.edges_to_dependencies(depflag=depflag)


def emulate_env_utility(cmd_name, context, args):
def emulate_env_utility(cmd_name, context: Context, args):
    if not args.spec:
        tty.die("spack %s requires a spec." % cmd_name)

@@ -120,7 +120,7 @@ def emulate_env_utility(cmd_name, context, args):
            hashes=True,
            # This shows more than necessary, but we cannot dynamically change deptypes
            # in Spec.tree(...).
            deptypes="all" if context == "build" else ("build", "test", "link", "run"),
            deptypes="all" if context == Context.BUILD else ("build", "test", "link", "run"),
        ),
    )


@@ -5,6 +5,7 @@

import os
import re
import sys
import urllib.parse

import llnl.util.tty as tty
@@ -62,6 +63,10 @@ class {class_name}({base_class_name}):
    # notify when the package is updated.
    # maintainers("github_user1", "github_user2")

    # FIXME: Add the SPDX identifier of the project's license below.
    # See https://spdx.org/licenses/ for a list.
    license("UNKNOWN")

{versions}

{dependencies}
@@ -823,6 +828,11 @@ def get_versions(args, name):
    # Find available versions
    try:
        url_dict = spack.url.find_versions_of_archive(args.url)
        if len(url_dict) > 1 and not args.batch and sys.stdin.isatty():
            url_dict_filtered = spack.stage.interactive_version_filter(url_dict)
            if url_dict_filtered is None:
                exit(0)
            url_dict = url_dict_filtered
    except UndetectableVersionError:
        # Use fake versions
        tty.warn("Couldn't detect version in: {0}".format(args.url))
@@ -834,11 +844,7 @@ def get_versions(args, name):
        url_dict = {version: args.url}

    version_hashes = spack.stage.get_checksums_for_versions(
        url_dict,
        name,
        first_stage_function=guesser,
        keep_stage=args.keep_stage,
        batch=(args.batch or len(url_dict) == 1),
        url_dict, name, first_stage_function=guesser, keep_stage=args.keep_stage
    )

    versions = get_version_lines(version_hashes, url_dict)

@@ -8,6 +8,7 @@
import shutil
import sys
import tempfile
from typing import Optional

import llnl.string as string
import llnl.util.filesystem as fs
@@ -96,22 +97,16 @@ def env_activate_setup_parser(subparser):

    view_options = subparser.add_mutually_exclusive_group()
    view_options.add_argument(
        "-v",
        "--with-view",
        action="store_const",
        dest="with_view",
        const=True,
        default=True,
        help="update PATH, etc., with associated view",
        "-v",
        metavar="name",
        help="set runtime environment variables for specific view",
    )
    view_options.add_argument(
        "-V",
        "--without-view",
        action="store_const",
        dest="with_view",
        const=False,
        default=True,
        help="do not update PATH, etc., with associated view",
        "-V",
        action="store_true",
        help="do not set runtime environment variables for any view",
    )

    subparser.add_argument(
@@ -197,10 +192,20 @@ def env_activate(args):

    # Activate new environment
    active_env = ev.Environment(env_path)

    # Check if runtime environment variables are requested, and if so, for what view.
    view: Optional[str] = None
    if args.with_view:
        view = args.with_view
        if not active_env.has_view(view):
            tty.die(f"The environment does not have a view named '{view}'")
    elif not args.without_view and active_env.has_view(ev.default_view_name):
        view = ev.default_view_name

    cmds += spack.environment.shell.activate_header(
        env=active_env, shell=args.shell, prompt=env_prompt if args.prompt else None
        env=active_env, shell=args.shell, prompt=env_prompt if args.prompt else None, view=view
    )
    env_mods.extend(spack.environment.shell.activate(env=active_env, add_view=args.with_view))
    env_mods.extend(spack.environment.shell.activate(env=active_env, view=view))
    cmds += env_mods.shell_modifications(args.shell)
    sys.stdout.write(cmds)

@@ -375,28 +380,33 @@ def env_remove(args):
    and manifests embedded in repositories should be removed manually.
    """
    read_envs = []
    bad_envs = []
    for env_name in args.rm_env:
        env = ev.read(env_name)
        read_envs.append(env)
        try:
            env = ev.read(env_name)
            read_envs.append(env)
        except spack.config.ConfigFormatError:
            bad_envs.append(env_name)

    if not args.yes_to_all:
        answer = tty.get_yes_or_no(
            "Really remove %s %s?"
            % (
                string.plural(len(args.rm_env), "environment", show_n=False),
                string.comma_and(args.rm_env),
            ),
            default=False,
        )
        environments = string.plural(len(args.rm_env), "environment", show_n=False)
        envs = string.comma_and(args.rm_env)
        answer = tty.get_yes_or_no(f"Really remove {environments} {envs}?", default=False)
        if not answer:
            tty.die("Will not remove any environments")

    for env in read_envs:
        name = env.name
        if env.active:
            tty.die("Environment %s can't be removed while activated." % env.name)

            tty.die(f"Environment {name} can't be removed while activated.")
        env.destroy()
        tty.msg("Successfully removed environment '%s'" % env.name)
        tty.msg(f"Successfully removed environment '{name}'")

    for bad_env_name in bad_envs:
        shutil.rmtree(
            spack.environment.environment.environment_dir_from_name(bad_env_name, exists_ok=True)
        )
        tty.msg(f"Successfully removed environment '{bad_env_name}'")


#
@@ -662,18 +672,31 @@ def env_depfile(args):
    # Currently only make is supported.
    spack.cmd.require_active_env(cmd_name="env depfile")

    env = ev.active_environment()

    # What things do we build when running make? By default, we build the
    # root specs. If specific specs are provided as input, we build those.
    filter_specs = spack.cmd.parse_specs(args.specs) if args.specs else None
    template = spack.tengine.make_environment().get_template(os.path.join("depfile", "Makefile"))
    model = depfile.MakefileModel.from_env(
        ev.active_environment(),
        env,
        filter_specs=filter_specs,
        pkg_buildcache=depfile.UseBuildCache.from_string(args.use_buildcache[0]),
        dep_buildcache=depfile.UseBuildCache.from_string(args.use_buildcache[1]),
        make_prefix=args.make_prefix,
        jobserver=args.jobserver,
    )

    # Warn in case we're generating a depfile for an empty environment. We don't automatically
    # concretize; the user should do that explicitly. Could be changed in the future if requested.
    if model.empty:
        if not env.user_specs:
            tty.warn("no specs in the environment")
        elif filter_specs is not None:
            tty.warn("no concrete matching specs found in environment")
        else:
            tty.warn("environment is not concretized. Run `spack concretize` first")

    makefile = template.render(model.to_dict())

    # Finally write to stdout/file.

@@ -72,6 +72,10 @@ def variant(s):
    return spack.spec.ENABLED_VARIANT_COLOR + s + plain_format


def license(s):
    return spack.spec.VERSION_COLOR + s + plain_format


class VariantFormatter:
    def __init__(self, variants):
        self.variants = variants
@@ -348,6 +352,22 @@ def print_virtuals(pkg):
        color.cprint(" None")


def print_licenses(pkg):
    """Output the licenses of the project."""

    color.cprint("")
    color.cprint(section_title("Licenses: "))

    if len(pkg.licenses) == 0:
        color.cprint(" None")
    else:
        pad = padder(pkg.licenses, 4)
        for when_spec in pkg.licenses:
            license_identifier = pkg.licenses[when_spec]
            line = license(" {0}".format(pad(license_identifier))) + color.cescape(when_spec)
            color.cprint(line)


def info(parser, args):
    spec = spack.spec.Spec(args.package)
    pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
@@ -377,6 +397,7 @@ def info(parser, args):
        (args.all or not args.no_dependencies, print_dependencies),
        (args.all or args.virtuals, print_virtuals),
        (args.all or args.tests, print_tests),
        (args.all or True, print_licenses),
    ]
    for print_it, func in sections:
        if print_it:

@@ -240,8 +240,7 @@ def default_log_file(spec):
    """Computes the default filename for the log file and creates
    the corresponding directory if not present
    """
    fmt = "test-{x.name}-{x.version}-{hash}.xml"
    basename = fmt.format(x=spec, hash=spec.dag_hash())
    basename = spec.format_path("test-{name}-{version}-{hash}.xml")
    dirname = fs.os.path.join(spack.paths.reports_path, "junit")
    fs.mkdirp(dirname)
    return fs.os.path.join(dirname, basename)

@@ -5,6 +5,8 @@

import sys

import llnl.util.tty as tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.cmd.find
@@ -108,16 +110,14 @@ def load(parser, args):
        )
        return 1

    with spack.store.STORE.db.read_transaction():
        if "dependencies" in args.things_to_load:
            include_roots = "package" in args.things_to_load
            specs = [
                dep for spec in specs for dep in spec.traverse(root=include_roots, order="post")
            ]
    if args.things_to_load != "package,dependencies":
        tty.warn(
            "The `--only` flag in spack load is deprecated and will be removed in Spack v0.22"
        )

    env_mod = spack.util.environment.EnvironmentModifications()
    with spack.store.STORE.db.read_transaction():
        env_mod = uenv.environment_modifications_for_specs(*specs)
    for spec in specs:
        env_mod.extend(uenv.environment_modifications_for_spec(spec))
        env_mod.prepend_path(uenv.spack_loaded_hashes_var, spec.dag_hash())
    cmds = env_mod.shell_modifications(args.shell)


@@ -111,7 +111,7 @@ def setup_parser(subparser):
            "and source use `--type binary --type source` (default)"
        ),
    )
    arguments.add_s3_connection_args(add_parser, False)
    arguments.add_connection_args(add_parser, False)
    # Remove
    remove_parser = sp.add_parser("remove", aliases=["rm"], help=mirror_remove.__doc__)
    remove_parser.add_argument("name", help="mnemonic name for mirror", metavar="mirror")
@@ -141,7 +141,7 @@ def setup_parser(subparser):
        default=spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )
    arguments.add_s3_connection_args(set_url_parser, False)
    arguments.add_connection_args(set_url_parser, False)

    # Set
    set_parser = sp.add_parser("set", help=mirror_set.__doc__)
@@ -170,7 +170,7 @@ def setup_parser(subparser):
        default=spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )
    arguments.add_s3_connection_args(set_parser, False)
    arguments.add_connection_args(set_parser, False)

    # List
    list_parser = sp.add_parser("list", help=mirror_list.__doc__)
@@ -192,6 +192,8 @@ def mirror_add(args):
        or args.s3_profile
        or args.s3_endpoint_url
        or args.type
        or args.oci_username
        or args.oci_password
    ):
        connection = {"url": args.url}
        if args.s3_access_key_id and args.s3_access_key_secret:
@@ -202,6 +204,8 @@ def mirror_add(args):
            connection["profile"] = args.s3_profile
        if args.s3_endpoint_url:
            connection["endpoint_url"] = args.s3_endpoint_url
        if args.oci_username and args.oci_password:
            connection["access_pair"] = [args.oci_username, args.oci_password]
        if args.type:
            connection["binary"] = "binary" in args.type
            connection["source"] = "source" in args.type
@@ -235,6 +239,8 @@ def _configure_mirror(args):
        changes["profile"] = args.s3_profile
    if args.s3_endpoint_url:
        changes["endpoint_url"] = args.s3_endpoint_url
    if args.oci_username and args.oci_password:
        changes["access_pair"] = [args.oci_username, args.oci_password]

    # argparse cannot distinguish between --binary and --no-binary when same dest :(
    # notice that set-url does not have these args, so getattr

@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.cmd.common.env_utility as env_utility
from spack.context import Context

description = (
    "run a command in a spec's test environment, or dump its environment to screen or file"
@@ -14,4 +15,4 @@


def test_env(parser, args):
    env_utility.emulate_env_utility("test-env", "test", args)
    env_utility.emulate_env_utility("test-env", Context.TEST, args)

@@ -88,9 +88,8 @@ def unload(parser, args):
        )
        return 1

    env_mod = spack.util.environment.EnvironmentModifications()
    env_mod = uenv.environment_modifications_for_specs(*specs).reversed()
    for spec in specs:
        env_mod.extend(uenv.environment_modifications_for_spec(spec).reversed())
        env_mod.remove_path(uenv.spack_loaded_hashes_var, spec.dag_hash())
    cmds = env_mod.shell_modifications(args.shell)


@@ -155,7 +155,7 @@ def _valid_virtuals_and_externals(self, spec):
            ),
        )

    def choose_virtual_or_external(self, spec):
    def choose_virtual_or_external(self, spec: spack.spec.Spec):
        """Given a list of candidate virtual and external packages, try to
        find one that is most ABI compatible.
        """

@@ -272,13 +272,6 @@ def _os_pkg_manager(self):
            raise spack.error.SpackError(msg)
        return os_pkg_manager

    @tengine.context_property
    def extra_instructions(self):
        Extras = namedtuple("Extra", ["build", "final"])
        extras = self.container_config.get("extra_instructions", {})
        build, final = extras.get("build", None), extras.get("final", None)
        return Extras(build=build, final=final)

    @tengine.context_property
    def labels(self):
        return self.container_config.get("labels", {})

lib/spack/spack/context.py (new file, 29 lines)
@@ -0,0 +1,29 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This module provides classes used in user and build environment"""

from enum import Enum


class Context(Enum):
    """Enum used to indicate the context in which an environment has to be setup: build,
    run or test."""

    BUILD = 1
    RUN = 2
    TEST = 3

    def __str__(self):
        return ("build", "run", "test")[self.value - 1]

    @classmethod
    def from_string(cls, s: str):
        if s == "build":
            return Context.BUILD
        elif s == "run":
            return Context.RUN
        elif s == "test":
            return Context.TEST
        raise ValueError(f"context should be one of 'build', 'run', 'test', got {s}")

@@ -269,7 +269,7 @@ def find_windows_compiler_root_paths() -> List[str]:

        At the moment simply returns location of VS install paths from VSWhere
        But should be extended to include more information as relevant"""
        return list(winOs.WindowsOs.vs_install_paths)
        return list(winOs.WindowsOs().vs_install_paths)

    @staticmethod
    def find_windows_compiler_cmake_paths() -> List[str]:
@@ -299,36 +299,36 @@ def find_windows_compiler_bundled_packages() -> List[str]:


class WindowsKitExternalPaths:
    plat_major_ver = None
    if sys.platform == "win32":
        plat_major_ver = str(winOs.windows_version()[0])

    @staticmethod
    def find_windows_kit_roots() -> Optional[str]:
    def find_windows_kit_roots() -> List[str]:
        """Return Windows kit root, typically %programfiles%\\Windows Kits\\10|11\\"""
        if sys.platform != "win32":
            return None
            return []
        program_files = os.environ["PROGRAMFILES(x86)"]
        kit_base = os.path.join(
            program_files, "Windows Kits", WindowsKitExternalPaths.plat_major_ver
        )
        return kit_base
        kit_base = os.path.join(program_files, "Windows Kits", "**")
        return glob.glob(kit_base)

    @staticmethod
    def find_windows_kit_bin_paths(kit_base: Optional[str] = None) -> List[str]:
        """Returns Windows kit bin directory per version"""
        kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
        assert kit_base is not None, "unexpected value for kit_base"
        kit_bin = os.path.join(kit_base, "bin")
        return glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\"))
        assert kit_base, "Unexpectedly empty value for Windows kit base path"
        kit_paths = []
        for kit in kit_base:
            kit_bin = os.path.join(kit, "bin")
            kit_paths.extend(glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\")))
        return kit_paths

    @staticmethod
    def find_windows_kit_lib_paths(kit_base: Optional[str] = None) -> List[str]:
        """Returns Windows kit lib directory per version"""
        kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
        assert kit_base is not None, "unexpected value for kit_base"
        kit_lib = os.path.join(kit_base, "Lib")
        return glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\"))
        assert kit_base, "Unexpectedly empty value for Windows kit base path"
        kit_paths = []
        for kit in kit_base:
            kit_lib = os.path.join(kit, "Lib")
            kit_paths.extend(glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\")))
        return kit_paths

    @staticmethod
    def find_windows_driver_development_kit_paths() -> List[str]:
@@ -347,23 +347,30 @@ def find_windows_kit_reg_installed_roots_paths() -> List[str]:
        if not reg:
            # couldn't find key, return empty list
            return []
        return WindowsKitExternalPaths.find_windows_kit_lib_paths(
            reg.get_value("KitsRoot%s" % WindowsKitExternalPaths.plat_major_ver).value
        )
        kit_root_reg = re.compile(r"KitsRoot[0-9]+")
        root_paths = []
        for kit_root in filter(kit_root_reg.match, reg.get_values().keys()):
            root_paths.extend(
                WindowsKitExternalPaths.find_windows_kit_lib_paths(reg.get_value(kit_root).value)
            )
        return root_paths

    @staticmethod
    def find_windows_kit_reg_sdk_paths() -> List[str]:
        reg = spack.util.windows_registry.WindowsRegistryView(
            "SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows\\v%s.0"
            % WindowsKitExternalPaths.plat_major_ver,
        sdk_paths = []
        sdk_regex = re.compile(r"v[0-9]+.[0-9]+")
        windows_reg = spack.util.windows_registry.WindowsRegistryView(
            "SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows",
            root_key=spack.util.windows_registry.HKEY.HKEY_LOCAL_MACHINE,
        )
        if not reg:
            # couldn't find key, return empty list
            return []
        return WindowsKitExternalPaths.find_windows_kit_lib_paths(
            reg.get_value("InstallationFolder").value
        )
        for key in filter(sdk_regex.match, [x.name for x in windows_reg.get_subkeys()]):
            reg = windows_reg.get_subkey(key)
            sdk_paths.extend(
                WindowsKitExternalPaths.find_windows_kit_lib_paths(
                    reg.get_value("InstallationFolder").value
                )
            )
        return sdk_paths


def find_win32_additional_install_paths() -> List[str]:

@@ -15,9 +15,12 @@
from typing import Dict, List, Optional, Set, Tuple

import llnl.util.filesystem
import llnl.util.lang
import llnl.util.tty

import spack.util.elf as elf_utils
import spack.util.environment
import spack.util.environment as environment
import spack.util.ld_so_conf

from .common import (
@@ -39,15 +42,29 @@
DETECTION_TIMEOUT = 120


def common_windows_package_paths() -> List[str]:
def common_windows_package_paths(pkg_cls=None) -> List[str]:
    """Get the paths for common package installation location on Windows
    that are outside the PATH
    Returns [] on unix
    """
    if sys.platform != "win32":
        return []
    paths = WindowsCompilerExternalPaths.find_windows_compiler_bundled_packages()
    paths.extend(find_win32_additional_install_paths())
    paths.extend(WindowsKitExternalPaths.find_windows_kit_bin_paths())
    paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_installed_roots_paths())
    paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_sdk_paths())
    if pkg_cls:
        paths.extend(compute_windows_user_path_for_package(pkg_cls))
        paths.extend(compute_windows_program_path_for_package(pkg_cls))
    return paths


def file_identifier(path):
    s = os.stat(path)
    return (s.st_dev, s.st_ino)


def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
    """Get the paths of all executables available from the current PATH.

@@ -62,18 +79,44 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
        path_hints: list of paths to be searched. If None the list will be
            constructed based on the PATH environment variable.
    """
    if sys.platform == "win32":
        path_hints.extend(common_windows_package_paths())
    search_paths = llnl.util.filesystem.search_paths_for_executables(*path_hints)
    return path_to_dict(search_paths)


def get_elf_compat(path):
    """For ELF files, get a triplet (EI_CLASS, EI_DATA, e_machine) and see if
    it is host-compatible."""
    # On ELF platforms, we try to be a bit smarter about shared libraries by
    # dropping those that are not host-compatible.
    with open(path, "rb") as f:
        elf = elf_utils.parse_elf(f, only_header=True)
        return (elf.is_64_bit, elf.is_little_endian, elf.elf_hdr.e_machine)


def accept_elf(path, host_compat):
    """Accept an ELF file if the header matches the given compat triplet,
    obtained with :py:func:`get_elf_compat`. In case it's not an ELF (e.g. a
    static library, or some arbitrary file), fall back to is_readable_file."""
    # Fast path: assume libraries at least have .so in their basename.
    # Note: don't replace with splitext, because of libsmth.so.1.2.3 file names.
    if ".so" not in os.path.basename(path):
        return llnl.util.filesystem.is_readable_file(path)
    try:
        return host_compat == get_elf_compat(path)
    except (OSError, elf_utils.ElfParsingError):
        return llnl.util.filesystem.is_readable_file(path)
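Putting the two helpers together: compute the host triplet once from the running interpreter, then filter candidate libraries, falling back to a plain readability check for non-ELF files. A minimal sketch within this module's namespace (paths hypothetical):

host_compat = get_elf_compat(sys.executable)
candidates = ["/usr/lib/libz.so.1", "/opt/legacy32/libz.so.1"]  # hypothetical
usable = [p for p in candidates if accept_elf(p, host_compat)]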


def libraries_in_ld_and_system_library_path(
    path_hints: Optional[List[str]] = None,
) -> Dict[str, str]:
    """Get the paths of all libraries available from LD_LIBRARY_PATH,
    LIBRARY_PATH, DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and
    standard system library paths.
    """Get the paths of all libraries available from ``path_hints`` or the
    following defaults:

    - Environment variables (Linux: ``LD_LIBRARY_PATH``, Darwin: ``DYLD_LIBRARY_PATH``,
      and ``DYLD_FALLBACK_LIBRARY_PATH``)
    - Dynamic linker default paths (glibc: ld.so.conf, musl: ld-musl-<arch>.path)
    - Default system library paths.

    For convenience, this is constructed as a dictionary where the keys are
    the library paths and the values are the names of the libraries
@@ -87,31 +130,71 @@ def libraries_in_ld_and_system_library_path(
            constructed based on the set of LD_LIBRARY_PATH, LIBRARY_PATH,
            DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH environment
            variables as well as the standard system library paths.
        path_hints (list): list of paths to be searched. If ``None``, the default
            system paths are used.
    """
    path_hints = (
        path_hints
        or spack.util.environment.get_path("LD_LIBRARY_PATH")
        + spack.util.environment.get_path("DYLD_LIBRARY_PATH")
        + spack.util.environment.get_path("DYLD_FALLBACK_LIBRARY_PATH")
        + spack.util.ld_so_conf.host_dynamic_linker_search_paths()
    if path_hints:
        search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
    else:
        search_paths = []

        # Environment variables
        if sys.platform == "darwin":
            search_paths.extend(environment.get_path("DYLD_LIBRARY_PATH"))
            search_paths.extend(environment.get_path("DYLD_FALLBACK_LIBRARY_PATH"))
        elif sys.platform.startswith("linux"):
            search_paths.extend(environment.get_path("LD_LIBRARY_PATH"))

        # Dynamic linker paths
        search_paths.extend(spack.util.ld_so_conf.host_dynamic_linker_search_paths())

    # Drop redundant paths
    search_paths = list(filter(os.path.isdir, search_paths))

    # Make sure we don't doubly list /usr/lib and /lib etc
    search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier))

    try:
        host_compat = get_elf_compat(sys.executable)
        accept = lambda path: accept_elf(path, host_compat)
    except (OSError, elf_utils.ElfParsingError):
        accept = llnl.util.filesystem.is_readable_file

    path_to_lib = {}
    # Reverse order of search directories so that a lib in the first
    # search path entry overrides later entries
    for search_path in reversed(search_paths):
        for lib in os.listdir(search_path):
            lib_path = os.path.join(search_path, lib)
            if accept(lib_path):
                path_to_lib[lib_path] = lib
    return path_to_lib
|
||||
|
||||
|
||||
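With hints the function scans only the caller's directories; without hints it falls back to the environment variables and dynamic linker defaults listed in the docstring. A usage sketch (prefix path illustrative):

from spack.detection.path import libraries_in_ld_and_system_library_path

# Restrict the scan to an explicit prefix
by_path = libraries_in_ld_and_system_library_path(path_hints=["/opt/mylibs"])

# Or scan LD_LIBRARY_PATH / DYLD_* plus ld.so.conf and system defaults
by_path = libraries_in_ld_and_system_library_path()
for lib_path, lib_name in by_path.items():
    print(f"{lib_name} -> {lib_path}")
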
def libraries_in_windows_paths(path_hints: Optional[List[str]] = None) -> Dict[str, str]:
    """Get the paths of all libraries available from the system PATH paths.

    For more details, see `libraries_in_ld_and_system_library_path` regarding
    return type and contents.

    Args:
        path_hints: list of paths to be searched. If None the list will be
            constructed based on the PATH environment variable as well as
            the standard system library paths.
    """
    search_hints = (
        path_hints if path_hints is not None else spack.util.environment.get_path("PATH")
    )
    search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
    return path_to_dict(search_paths)


def libraries_in_windows_paths(path_hints: List[str]) -> Dict[str, str]:
    path_hints.extend(spack.util.environment.get_path("PATH"))
    search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
    search_paths = llnl.util.filesystem.search_paths_for_libraries(*search_hints)
    # On Windows, some libraries (.dlls) are found in the bin directory or sometimes
    # at the search root. Add both of those options to the search scheme
    search_paths.extend(llnl.util.filesystem.search_paths_for_executables(*path_hints))
    search_paths.extend(WindowsKitExternalPaths.find_windows_kit_lib_paths())
    search_paths.extend(WindowsKitExternalPaths.find_windows_kit_bin_paths())
    search_paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_installed_roots_paths())
    search_paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_sdk_paths())
    # SDK and WGL should be handled by above, however on occasion the WDK is in an atypical
    # location, so we handle that case specifically.
    search_paths.extend(WindowsKitExternalPaths.find_windows_driver_development_kit_paths())
    search_paths.extend(llnl.util.filesystem.search_paths_for_executables(*search_hints))
    if path_hints is None:
        # if no user provided path was given, add defaults to the search
        search_paths.extend(WindowsKitExternalPaths.find_windows_kit_lib_paths())
        # SDK and WGL should be handled by above, however on occasion the WDK is in an atypical
        # location, so we handle that case specifically.
        search_paths.extend(WindowsKitExternalPaths.find_windows_driver_development_kit_paths())
    return path_to_dict(search_paths)

@@ -125,19 +208,8 @@ def _group_by_prefix(paths: Set[str]) -> Dict[str, Set[str]]:
class Finder:
    """Inspects the file-system looking for packages. Guesses places where to look using PATH."""

    def path_hints(
        self, *, pkg: "spack.package_base.PackageBase", initial_guess: Optional[List[str]] = None
    ) -> List[str]:
        """Returns the list of paths to be searched.

        Args:
            pkg: package being detected
            initial_guess: initial list of paths from caller
        """
        result = initial_guess or []
        result.extend(compute_windows_user_path_for_package(pkg))
        result.extend(compute_windows_program_path_for_package(pkg))
        return result
    def default_path_hints(self) -> List[str]:
        return []

    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
        """Returns the list of patterns used to match candidate files.
@@ -245,6 +317,8 @@ def find(
        Args:
            pkg_name: package being detected
            initial_guess: initial list of paths to search from the caller
                if None, default paths are searched. If this
                is an empty list, nothing will be searched.
        """
        import spack.repo
@@ -252,13 +326,18 @@ def find(
        patterns = self.search_patterns(pkg=pkg_cls)
        if not patterns:
            return []
        path_hints = self.path_hints(pkg=pkg_cls, initial_guess=initial_guess)
        candidates = self.candidate_files(patterns=patterns, paths=path_hints)
        if initial_guess is None:
            initial_guess = self.default_path_hints()
        initial_guess.extend(common_windows_package_paths(pkg_cls))
        candidates = self.candidate_files(patterns=patterns, paths=initial_guess)
        result = self.detect_specs(pkg=pkg_cls, paths=candidates)
        return result

class ExecutablesFinder(Finder):
    def default_path_hints(self) -> List[str]:
        return spack.util.environment.get_path("PATH")

    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
        result = []
        if hasattr(pkg, "executables") and hasattr(pkg, "platform_executables"):
@@ -298,7 +377,7 @@ def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]
        libraries_by_path = (
            libraries_in_ld_and_system_library_path(path_hints=paths)
            if sys.platform != "win32"
            else libraries_in_windows_paths(paths)
            else libraries_in_windows_paths(path_hints=paths)
        )
        patterns = [re.compile(x) for x in patterns]
        result = []
@@ -334,21 +413,16 @@ def by_path(
    # TODO: Packages should be able to define both .libraries and .executables in the future
    # TODO: determine_spec_details should get all relevant libraries and executables in one call
    executables_finder, libraries_finder = ExecutablesFinder(), LibrariesFinder()

    executables_path_guess = (
        spack.util.environment.get_path("PATH") if path_hints is None else path_hints
    )
    libraries_path_guess = [] if path_hints is None else path_hints
    detected_specs_by_package: Dict[str, Tuple[concurrent.futures.Future, ...]] = {}

    result = collections.defaultdict(list)
    with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
        for pkg in packages_to_search:
            executable_future = executor.submit(
                executables_finder.find, pkg_name=pkg, initial_guess=executables_path_guess
                executables_finder.find, pkg_name=pkg, initial_guess=path_hints
            )
            library_future = executor.submit(
                libraries_finder.find, pkg_name=pkg, initial_guess=libraries_path_guess
                libraries_finder.find, pkg_name=pkg, initial_guess=path_hints
            )
            detected_specs_by_package[pkg] = executable_future, library_future

@@ -359,9 +433,13 @@ def by_path(
            if detected:
                _, unqualified_name = spack.repo.partition_package_name(pkg_name)
                result[unqualified_name].extend(detected)
        except Exception:
        except concurrent.futures.TimeoutError:
            llnl.util.tty.debug(
                f"[EXTERNAL DETECTION] Skipping {pkg_name}: timeout reached"
            )
        except Exception as e:
            llnl.util.tty.debug(
                f"[EXTERNAL DETECTION] Skipping {pkg_name}: exception occurred {e}"
            )

    return result

@@ -64,6 +64,7 @@ class OpenMpi(Package):
    "depends_on",
    "extends",
    "maintainers",
    "license",
    "provides",
    "patch",
    "variant",
@@ -572,17 +573,21 @@ def _execute_extends(pkg):
    return _execute_extends


@directive("provided")
def provides(*specs, **kwargs):
    """Allows packages to provide a virtual dependency. If a package provides
    'mpi', other packages can declare that they depend on "mpi", and spack
    can use the providing package to satisfy the dependency.
@directive(dicts=("provided", "provided_together"))
def provides(*specs, when: Optional[str] = None):
    """Allows packages to provide a virtual dependency.

    If a package provides "mpi", other packages can declare that they depend on "mpi",
    and spack can use the providing package to satisfy the dependency.

    Args:
        *specs: virtual specs provided by this package
        when: condition when this provides clause needs to be considered
    """

    def _execute_provides(pkg):
        import spack.parser  # Avoid circular dependency

        when = kwargs.get("when")
        when_spec = make_when_spec(when)
        if not when_spec:
            return
@@ -590,15 +595,18 @@ def _execute_provides(pkg):
        # ``when`` specs for ``provides()`` need a name, as they are used
        # to build the ProviderIndex.
        when_spec.name = pkg.name
        spec_objs = [spack.spec.Spec(x) for x in specs]
        spec_names = [x.name for x in spec_objs]
        if len(spec_names) > 1:
            pkg.provided_together.setdefault(when_spec, []).append(set(spec_names))

        for string in specs:
            for provided_spec in spack.parser.parse(string):
                if pkg.name == provided_spec.name:
                    raise CircularReferenceError("Package '%s' cannot provide itself." % pkg.name)
        for provided_spec in spec_objs:
            if pkg.name == provided_spec.name:
                raise CircularReferenceError("Package '%s' cannot provide itself." % pkg.name)

                if provided_spec not in pkg.provided:
                    pkg.provided[provided_spec] = set()
                pkg.provided[provided_spec].add(when_spec)
            if provided_spec not in pkg.provided:
                pkg.provided[provided_spec] = set()
            pkg.provided[provided_spec].add(when_spec)

    return _execute_provides

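In recipe terms, the reworked directive allows a condition on each provides clause, and listing several virtuals in one call records them in provided_together. A hedged sketch of a package using both (names and versions invented for illustration):

from spack.package import *


class MyLinalg(Package):
    """Hypothetical recipe fragment demonstrating the new provides signature."""

    # The virtual "mpi" is only implemented from 2.0 onwards
    provides("mpi", when="@2.0:")

    # blas and lapack are provided together: a solver must use this package
    # for both virtuals or for neither
    provides("blas", "lapack", when="@3.0:")
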
@@ -862,6 +870,44 @@ def _execute_maintainer(pkg):
    return _execute_maintainer


def _execute_license(pkg, license_identifier: str, when):
    # If when is not specified the license always holds
    when_spec = make_when_spec(when)
    if not when_spec:
        return

    for other_when_spec in pkg.licenses:
        if when_spec.intersects(other_when_spec):
            when_message = ""
            if when_spec != make_when_spec(None):
                when_message = f"when {when_spec}"
            other_when_message = ""
            if other_when_spec != make_when_spec(None):
                other_when_message = f"when {other_when_spec}"
            err_msg = (
                f"{pkg.name} is specified as being licensed as {license_identifier} "
                f"{when_message}, but it is also specified as being licensed under "
                f"{pkg.licenses[other_when_spec]} {other_when_message}, which conflict."
            )
            raise OverlappingLicenseError(err_msg)

    pkg.licenses[when_spec] = license_identifier


@directive("licenses")
def license(license_identifier: str, when=None):
    """Add a new license directive, to specify the SPDX identifier the software is
    distributed under.

    Args:
        license_identifier: SPDX identifier specifying the license the
            software is distributed under.
        when: A spec specifying when the license applies.
    """

    return lambda pkg: _execute_license(pkg, license_identifier, when)

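A sketch of the directive in use; the overlap check above rejects two license declarations whose when-ranges intersect (package invented for illustration):

class MyTool(Package):
    # Re-licensed at 2.0: the two when-ranges below do not intersect
    license("MIT", when="@:1")
    license("Apache-2.0", when="@2.0:")

    # license("GPL-2.0-only", when="@1.5:")  # would raise OverlappingLicenseError
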
@directive("requirements")
|
||||
def requires(*requirement_specs, policy="one_of", when=None, msg=None):
|
||||
"""Allows a package to request a configuration to be present in all valid solutions.
|
||||
@@ -920,3 +966,7 @@ class DependencyPatchError(DirectiveError):
|
||||
|
||||
class UnsupportedPackageDirective(DirectiveError):
|
||||
"""Raised when an invalid or unsupported package directive is specified."""
|
||||
|
||||
|
||||
class OverlappingLicenseError(DirectiveError):
|
||||
"""Raised when two licenses are declared that apply on overlapping specs."""
|
||||
|
||||
@@ -104,7 +104,7 @@ def relative_path_for_spec(self, spec):
        _check_concrete(spec)

        projection = spack.projections.get_projection(self.projections, spec)
        path = spec.format(projection)
        path = spec.format_path(projection)
        return str(Path(path))

    def write_spec(self, spec, path):

@@ -365,6 +365,7 @@
    read,
    root,
    spack_env_var,
    spack_env_view_var,
    update_yaml,
)

@@ -397,5 +398,6 @@
    "read",
    "root",
    "spack_env_var",
    "spack_env_view_var",
    "update_yaml",
]

@@ -232,6 +232,10 @@ def to_dict(self):
            "pkg_ids": " ".join(self.all_pkg_identifiers),
        }

    @property
    def empty(self):
        return len(self.roots) == 0

    @staticmethod
    def from_env(
        env: ev.Environment,
@@ -254,15 +258,10 @@ def from_env(
            jobserver: when enabled, make will invoke Spack with jobserver support. For
                dry-run this should be disabled.
        """
        # If no specs are provided as a filter, build all the specs in the environment.
        if filter_specs:
            entrypoints = [env.matching_spec(s) for s in filter_specs]
        else:
            entrypoints = [s for _, s in env.concretized_specs()]

        roots = env.all_matching_specs(*filter_specs) if filter_specs else env.concrete_roots()
        visitor = DepfileSpecVisitor(pkg_buildcache, dep_buildcache)
        traverse.traverse_breadth_first_with_visitor(
            entrypoints, traverse.CoverNodesVisitor(visitor, key=lambda s: s.dag_hash())
            roots, traverse.CoverNodesVisitor(visitor, key=lambda s: s.dag_hash())
        )

        return MakefileModel(env, entrypoints, visitor.adjacency_list, make_prefix, jobserver)
        return MakefileModel(env, roots, visitor.adjacency_list, make_prefix, jobserver)

@@ -64,6 +64,8 @@
#: environment variable used to indicate the active environment
spack_env_var = "SPACK_ENV"

#: environment variable used to indicate the active environment view
spack_env_view_var = "SPACK_ENV_VIEW"

#: currently activated environment
_active_environment: Optional["Environment"] = None
@@ -328,16 +330,21 @@ def create_in_dir(
    if with_view is None and keep_relative:
        return Environment(manifest_dir)

    manifest = EnvironmentManifestFile(manifest_dir)
    try:
        manifest = EnvironmentManifestFile(manifest_dir)

    if with_view is not None:
        manifest.set_default_view(with_view)
        if with_view is not None:
            manifest.set_default_view(with_view)

    if not keep_relative and init_file is not None and str(init_file).endswith(manifest_name):
        init_file = pathlib.Path(init_file)
        manifest.absolutify_dev_paths(init_file.parent)
        if not keep_relative and init_file is not None and str(init_file).endswith(manifest_name):
            init_file = pathlib.Path(init_file)
            manifest.absolutify_dev_paths(init_file.parent)

    manifest.flush()
        manifest.flush()

    except spack.config.ConfigFormatError as e:
        shutil.rmtree(manifest_dir)
        raise e

    return Environment(manifest_dir)

@@ -1478,11 +1485,12 @@ def _concretize_separately(self, tests=False):
            self._add_concrete_spec(s, concrete, new=False)

        # Concretize any new user specs that we haven't concretized yet
        arguments, root_specs = [], []
        args, root_specs, i = [], [], 0
        for uspec, uspec_constraints in zip(self.user_specs, self.user_specs.specs_as_constraints):
            if uspec not in old_concretized_user_specs:
                root_specs.append(uspec)
                arguments.append((uspec_constraints, tests))
                args.append((i, [str(x) for x in uspec_constraints], tests))
                i += 1

        # Ensure we don't try to bootstrap clingo in parallel
        if spack.config.get("config:concretizer", "clingo") == "clingo":
@@ -1501,34 +1509,42 @@ def _concretize_separately(self, tests=False):
            _ = spack.compilers.get_compiler_config()

        # Early return if there is nothing to do
        if len(arguments) == 0:
        if len(args) == 0:
            return []

        # Solve the environment in parallel on Linux
        start = time.time()
        max_processes = min(
            len(arguments),  # Number of specs
            spack.util.cpus.determine_number_of_jobs(parallel=True),
        )
        num_procs = min(len(args), spack.util.cpus.determine_number_of_jobs(parallel=True))

        # TODO: revisit this print as soon as darwin is parallel too
        # TODO: support parallel concretization on macOS and Windows
        msg = "Starting concretization"
        if sys.platform != "darwin":
            pool_size = spack.util.parallel.num_processes(max_processes=max_processes)
            if pool_size > 1:
                msg = msg + " pool with {0} processes".format(pool_size)
        if sys.platform not in ("darwin", "win32") and num_procs > 1:
            msg += f" pool with {num_procs} processes"
        tty.msg(msg)

        concretized_root_specs = spack.util.parallel.parallel_map(
            _concretize_task, arguments, max_processes=max_processes, debug=tty.is_debug()
        )
        batch = []
        for j, (i, concrete, duration) in enumerate(
            spack.util.parallel.imap_unordered(
                _concretize_task, args, processes=num_procs, debug=tty.is_debug()
            )
        ):
            batch.append((i, concrete))
            percentage = (j + 1) / len(args) * 100
            tty.verbose(
                f"{duration:6.1f}s [{percentage:3.0f}%] {concrete.cformat('{hash:7}')} "
                f"{root_specs[i].colored_str}"
            )
            sys.stdout.flush()

        # Add specs in original order
        batch.sort(key=lambda x: x[0])
        by_hash = {}  # for attaching information on test dependencies
        for root, (_, concrete) in zip(root_specs, batch):
            self._add_concrete_spec(root, concrete)
            by_hash[concrete.dag_hash()] = concrete

        finish = time.time()
        tty.msg("Environment concretized in %.2f seconds." % (finish - start))
        by_hash = {}
        for abstract, concrete in zip(root_specs, concretized_root_specs):
            self._add_concrete_spec(abstract, concrete)
            by_hash[concrete.dag_hash()] = concrete
        tty.msg(f"Environment concretized in {finish - start:.2f} seconds")

        # Unify the specs objects, so we get correct references to all parents
        self._read_lockfile_dict(self._to_lockfile_dict())
@@ -1595,16 +1611,14 @@ def concretize_and_add(self, user_spec, concrete_spec=None, tests=False):

    @property
    def default_view(self):
        if not self.views:
            raise SpackEnvironmentError("{0} does not have a view enabled".format(self.name))

        if default_view_name not in self.views:
            raise SpackEnvironmentError(
                "{0} does not have a default view enabled".format(self.name)
            )
        if not self.has_view(default_view_name):
            raise SpackEnvironmentError(f"{self.name} does not have a default view enabled")

        return self.views[default_view_name]

    def has_view(self, view_name: str) -> bool:
        return view_name in self.views

    def update_default_view(self, path_or_bool: Union[str, bool]) -> None:
        """Updates the path of the default view.

@@ -1690,62 +1704,34 @@ def check_views(self):
                "Loading the environment view will require reconcretization." % self.name
            )

    def _env_modifications_for_default_view(self, reverse=False):
        all_mods = spack.util.environment.EnvironmentModifications()
    def _env_modifications_for_view(
        self, view: ViewDescriptor, reverse: bool = False
    ) -> spack.util.environment.EnvironmentModifications:
        try:
            mods = uenv.environment_modifications_for_specs(*self.concrete_roots(), view=view)
        except Exception as e:
            # Failing to set up spec-specific changes shouldn't be a hard error.
            tty.warn(
                "couldn't load runtime environment due to {}: {}".format(e.__class__.__name__, e)
            )
            return spack.util.environment.EnvironmentModifications()
        return mods.reversed() if reverse else mods

        visited = set()

        errors = []
        for root_spec in self.concrete_roots():
            if root_spec in self.default_view and root_spec.installed and root_spec.package:
                for spec in root_spec.traverse(deptype="run", root=True):
                    if spec.name in visited:
                        # It is expected that only one instance of the package
                        # can be added to the environment - do not attempt to
                        # add multiple.
                        tty.debug(
                            "Not adding {0} to shell modifications: "
                            "this package has already been added".format(
                                spec.format("{name}/{hash:7}")
                            )
                        )
                        continue
                    else:
                        visited.add(spec.name)

                    try:
                        mods = uenv.environment_modifications_for_spec(spec, self.default_view)
                    except Exception as e:
                        msg = "couldn't get environment settings for %s" % spec.format(
                            "{name}@{version} /{hash:7}"
                        )
                        errors.append((msg, str(e)))
                        continue

                    all_mods.extend(mods.reversed() if reverse else mods)

        return all_mods, errors

    def add_default_view_to_env(self, env_mod):
        """
        Collect the environment modifications to activate an environment using the
        default view. Removes duplicate paths.
    def add_view_to_env(
        self, env_mod: spack.util.environment.EnvironmentModifications, view: str
    ) -> spack.util.environment.EnvironmentModifications:
        """Collect the environment modifications to activate an environment using the provided
        view. Removes duplicate paths.

        Args:
            env_mod (spack.util.environment.EnvironmentModifications): the environment
                modifications object that is modified.
        """
        if default_view_name not in self.views:
            # No default view to add to shell
            env_mod: the environment modifications object that is modified.
            view: the name of the view to activate."""
        descriptor = self.views.get(view)
        if not descriptor:
            return env_mod

        env_mod.extend(uenv.unconditional_environment_modifications(self.default_view))

        mods, errors = self._env_modifications_for_default_view()
        env_mod.extend(mods)
        if errors:
            for err in errors:
                tty.warn(*err)
        env_mod.extend(uenv.unconditional_environment_modifications(descriptor))
        env_mod.extend(self._env_modifications_for_view(descriptor))

        # deduplicate paths from specs mapped to the same location
        for env_var in env_mod.group_by_name():
@@ -1753,23 +1739,21 @@ def add_default_view_to_env(self, env_mod):

        return env_mod

    def rm_default_view_from_env(self, env_mod):
        """
        Collect the environment modifications to deactivate an environment using the
        default view. Reverses the action of ``add_default_view_to_env``.
    def rm_view_from_env(
        self, env_mod: spack.util.environment.EnvironmentModifications, view: str
    ) -> spack.util.environment.EnvironmentModifications:
        """Collect the environment modifications to deactivate an environment using the provided
        view. Reverses the action of ``add_view_to_env``.

        Args:
            env_mod (spack.util.environment.EnvironmentModifications): the environment
                modifications object that is modified.
        """
        if default_view_name not in self.views:
            # No default view to add to shell
            env_mod: the environment modifications object that is modified.
            view: the name of the view to deactivate."""
        descriptor = self.views.get(view)
        if not descriptor:
            return env_mod

        env_mod.extend(uenv.unconditional_environment_modifications(self.default_view).reversed())

        mods, _ = self._env_modifications_for_default_view(reverse=True)
        env_mod.extend(mods)
        env_mod.extend(uenv.unconditional_environment_modifications(descriptor).reversed())
        env_mod.extend(self._env_modifications_for_view(descriptor, reverse=True))

        return env_mod

@@ -2422,10 +2406,13 @@ def _concretize_from_constraints(spec_constraints, tests=False):
        invalid_constraints.extend(inv_variant_constraints)


def _concretize_task(packed_arguments):
    spec_constraints, tests = packed_arguments
def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]:
    index, spec_constraints, tests = packed_arguments
    spec_constraints = [Spec(x) for x in spec_constraints]
    with tty.SuppressOutput(msg_enabled=False):
        return _concretize_from_constraints(spec_constraints, tests)
        start = time.time()
        spec = _concretize_from_constraints(spec_constraints, tests)
        return index, spec, time.time() - start

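Since imap_unordered yields results in completion order, each packed argument carries its index so the caller can re-sort into submission order. The same pattern with only the standard library (multiprocessing stands in for spack.util.parallel here):

import multiprocessing
import time


def task(packed):
    index, payload = packed
    start = time.time()
    result = payload * payload  # stand-in for the expensive concretization
    return index, result, time.time() - start


if __name__ == "__main__":
    args = [(i, n) for i, n in enumerate([3, 1, 4, 1, 5])]
    with multiprocessing.Pool(processes=2) as pool:
        batch = [(i, res) for i, res, _ in pool.imap_unordered(task, args)]
    batch.sort(key=lambda x: x[0])  # restore submission order
    print([res for _, res in batch])  # [9, 1, 16, 1, 25]
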
def make_repo_path(root):

@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from typing import Optional

import llnl.util.tty as tty
from llnl.util.tty.color import colorize
@@ -13,12 +14,14 @@
from spack.util.environment import EnvironmentModifications


def activate_header(env, shell, prompt=None):
def activate_header(env, shell, prompt=None, view: Optional[str] = None):
    # Construct the commands to run
    cmds = ""
    if shell == "csh":
        # TODO: figure out how to make color work for csh
        cmds += "setenv SPACK_ENV %s;\n" % env.path
        if view:
            cmds += "setenv SPACK_ENV_VIEW %s;\n" % view
        cmds += 'alias despacktivate "spack env deactivate";\n'
        if prompt:
            cmds += "if (! $?SPACK_OLD_PROMPT ) "
@@ -29,6 +32,8 @@ def activate_header(env, shell, prompt=None):
            prompt = colorize("@G{%s} " % prompt, color=True)

        cmds += "set -gx SPACK_ENV %s;\n" % env.path
        if view:
            cmds += "set -gx SPACK_ENV_VIEW %s;\n" % view
        cmds += "function despacktivate;\n"
        cmds += " spack env deactivate;\n"
        cmds += "end;\n"
@@ -40,15 +45,21 @@ def activate_header(env, shell, prompt=None):
    elif shell == "bat":
        # TODO: Color
        cmds += 'set "SPACK_ENV=%s"\n' % env.path
        if view:
            cmds += 'set "SPACK_ENV_VIEW=%s"\n' % view
        # TODO: despacktivate
        # TODO: prompt
    elif shell == "pwsh":
        cmds += "$Env:SPACK_ENV='%s'\n" % env.path
        if view:
            cmds += "$Env:SPACK_ENV_VIEW='%s'\n" % view
    else:
        if "color" in os.getenv("TERM", "") and prompt:
            prompt = colorize("@G{%s}" % prompt, color=True, enclose=True)

        cmds += "export SPACK_ENV=%s;\n" % env.path
        if view:
            cmds += "export SPACK_ENV_VIEW=%s;\n" % view
        cmds += "alias despacktivate='spack env deactivate';\n"
        if prompt:
            cmds += "if [ -z ${SPACK_OLD_PS1+x} ]; then\n"
@@ -66,12 +77,14 @@ def deactivate_header(shell):
    cmds = ""
    if shell == "csh":
        cmds += "unsetenv SPACK_ENV;\n"
        cmds += "unsetenv SPACK_ENV_VIEW;\n"
        cmds += "if ( $?SPACK_OLD_PROMPT ) "
        cmds += ' eval \'set prompt="$SPACK_OLD_PROMPT" &&'
        cmds += " unsetenv SPACK_OLD_PROMPT';\n"
        cmds += "unalias despacktivate;\n"
    elif shell == "fish":
        cmds += "set -e SPACK_ENV;\n"
        cmds += "set -e SPACK_ENV_VIEW;\n"
        cmds += "functions -e despacktivate;\n"
        #
        # NOTE: Not changing fish_prompt (above) => no need to restore it here.
@@ -79,14 +92,19 @@ def deactivate_header(shell):
    elif shell == "bat":
        # TODO: Color
        cmds += 'set "SPACK_ENV="\n'
        cmds += 'set "SPACK_ENV_VIEW="\n'
        # TODO: despacktivate
        # TODO: prompt
    elif shell == "pwsh":
        cmds += "Set-Item -Path Env:SPACK_ENV\n"
        cmds += "Set-Item -Path Env:SPACK_ENV_VIEW\n"
    else:
        cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n"
        cmds += "unset SPACK_ENV; export SPACK_ENV;\n"
        cmds += "fi;\n"
        cmds += "if [ ! -z ${SPACK_ENV_VIEW+x} ]; then\n"
        cmds += "unset SPACK_ENV_VIEW; export SPACK_ENV_VIEW;\n"
        cmds += "fi;\n"
        cmds += "alias despacktivate > /dev/null 2>&1 && unalias despacktivate;\n"
        cmds += "if [ ! -z ${SPACK_OLD_PS1+x} ]; then\n"
        cmds += " if [ \"$SPACK_OLD_PS1\" = '$$$$' ]; then\n"
@@ -100,24 +118,23 @@ def deactivate_header(shell):
    return cmds

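A quick way to see the effect of the new view parameter is to print the generated headers; a SimpleNamespace stands in for a real Environment object here (sketch, path illustrative):

from types import SimpleNamespace

from spack.environment.shell import activate_header, deactivate_header

fake_env = SimpleNamespace(path="/home/user/myenv")  # only .path is used below

# Emits "export SPACK_ENV=..." and "export SPACK_ENV_VIEW=default;" lines
print(activate_header(fake_env, shell="sh", view="default"))

# Emits the matching SPACK_ENV / SPACK_ENV_VIEW cleanup commands
print(deactivate_header("sh"))
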
def activate(env, use_env_repo=False, add_view=True):
    """
    Activate an environment and append environment modifications
def activate(
    env: ev.Environment, use_env_repo=False, view: Optional[str] = "default"
) -> EnvironmentModifications:
    """Activate an environment and append environment modifications

    To activate an environment, we add its configuration scope to the
    existing Spack configuration, and we set active to the current
    environment.

    Arguments:
        env (spack.environment.Environment): the environment to activate
        use_env_repo (bool): use the packages exactly as they appear in the
            environment's repository
        add_view (bool): generate commands to add view to path variables
        env: the environment to activate
        use_env_repo: use the packages exactly as they appear in the environment's repository
        view: generate commands to add runtime environment variables for the named view

    Returns:
        spack.util.environment.EnvironmentModifications: Environment variable
            modifications to activate environment.
    """
            modifications to activate environment."""
    ev.activate(env, use_env_repo=use_env_repo)

    env_mods = EnvironmentModifications()
@@ -129,9 +146,9 @@ def activate(env, use_env_repo=False, add_view=True):
    # become PATH variables.
    #
    try:
        if add_view and ev.default_view_name in env.views:
        if view and env.has_view(view):
            with spack.store.STORE.db.read_transaction():
                env.add_default_view_to_env(env_mods)
                env.add_view_to_env(env_mods, view)
    except (spack.repo.UnknownPackageError, spack.repo.UnknownNamespaceError) as e:
        tty.error(e)
        tty.die(
@@ -145,17 +162,15 @@ def activate(env, use_env_repo=False, add_view=True):
    return env_mods

def deactivate():
    """
    Deactivate an environment and collect corresponding environment modifications.
def deactivate() -> EnvironmentModifications:
    """Deactivate an environment and collect corresponding environment modifications.

    Note: unloads the environment in its current state, not in the state it was
    loaded in, meaning that specs that were removed from the spack environment
    after activation are not unloaded.

    Returns:
        spack.util.environment.EnvironmentModifications: Environment variables
            modifications to activate environment.
        Environment variable modifications to deactivate the environment.
    """
    env_mods = EnvironmentModifications()
    active = ev.active_environment()
@@ -163,10 +178,12 @@ def deactivate():
    if active is None:
        return env_mods

    if ev.default_view_name in active.views:
    active_view = os.getenv(ev.spack_env_view_var)

    if active_view and active.has_view(active_view):
        try:
            with spack.store.STORE.db.read_transaction():
                active.rm_default_view_from_env(env_mods)
                active.rm_view_from_env(env_mods, active_view)
        except (spack.repo.UnknownPackageError, spack.repo.UnknownNamespaceError) as e:
            tty.warn(e)
            tty.warn(

@@ -28,6 +28,7 @@
import os.path
import re
import shutil
import urllib.error
import urllib.parse
from typing import List, Optional

@@ -41,6 +42,7 @@

import spack.config
import spack.error
import spack.oci.opener
import spack.url
import spack.util.crypto as crypto
import spack.util.git
@@ -537,6 +539,34 @@ def fetch(self):
        tty.msg("Using cached archive: {0}".format(path))


class OCIRegistryFetchStrategy(URLFetchStrategy):
    def __init__(self, url=None, checksum=None, **kwargs):
        super().__init__(url, checksum, **kwargs)

        self._urlopen = kwargs.get("_urlopen", spack.oci.opener.urlopen)

    @_needs_stage
    def fetch(self):
        file = self.stage.save_filename
        tty.msg(f"Fetching {self.url}")

        try:
            response = self._urlopen(self.url)
        except urllib.error.URLError as e:
            # clean up archive on failure.
            if self.archive_file:
                os.remove(self.archive_file)
            if os.path.lexists(file):
                os.remove(file)
            raise FailedDownloadError(self.url, f"Failed to fetch {self.url}: {e}") from e

        if os.path.lexists(file):
            os.remove(file)

        with open(file, "wb") as f:
            shutil.copyfileobj(response, f)

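Because the opener is injected through the _urlopen keyword, the strategy can be exercised without a live registry; a test can substitute a stub that returns an in-memory stream. A hedged sketch (URL and stage wiring are illustrative, not a complete test):

import io


def fake_urlopen(url):
    # Stands in for spack.oci.opener.urlopen in a unit test
    return io.BytesIO(b"blob-bytes")


# strategy = OCIRegistryFetchStrategy(url="https://registry.example.com/v2/...",
#                                     _urlopen=fake_urlopen)
# strategy.stage = ...   # a Stage whose save_filename points into a temp dir
# strategy.fetch()       # copies b"blob-bytes" into the stage file
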
class VCSFetchStrategy(FetchStrategy):
    """Superclass for version control system fetch strategies.

@@ -743,8 +773,7 @@ def git(self):
        # Disable advice for a quieter fetch
        # https://github.com/git/git/blob/master/Documentation/RelNotes/1.7.2.txt
        if self.git_version >= spack.version.Version("1.7.2"):
            self._git.add_default_arg("-c")
            self._git.add_default_arg("advice.detachedHead=false")
            self._git.add_default_arg("-c", "advice.detachedHead=false")

        # If the user asked for insecure fetching, make that work
        # with git as well.

@@ -500,7 +500,7 @@ def get_projection_for_spec(self, spec):

        proj = spack.projections.get_projection(self.projections, locator_spec)
        if proj:
            return os.path.join(self._root, locator_spec.format(proj))
            return os.path.join(self._root, locator_spec.format_path(proj))
        return self._root

    def get_all_specs(self):
@@ -776,7 +776,7 @@ def get_relative_projection_for_spec(self, spec):
            spec = spec.package.extendee_spec

        p = spack.projections.get_projection(self.projections, spec)
        return spec.format(p) if p else ""
        return spec.format_path(p) if p else ""

    def get_projection_for_spec(self, spec):
        """
@@ -791,7 +791,7 @@ def get_projection_for_spec(self, spec):

        proj = spack.projections.get_projection(self.projections, spec)
        if proj:
            return os.path.join(self._root, spec.format(proj))
            return os.path.join(self._root, spec.format_path(proj))
        return self._root


@@ -528,10 +528,15 @@ def node_entry(self, node):

    def edge_entry(self, edge):
        colormap = {"build": "dodgerblue", "link": "crimson", "run": "goldenrod"}
        label = ""
        if edge.virtuals:
            label = f" xlabel=\"virtuals={','.join(edge.virtuals)}\""
        return (
            edge.parent.dag_hash(),
            edge.spec.dag_hash(),
            f"[color=\"{':'.join(colormap[x] for x in dt.flag_to_tuple(edge.depflag))}\"]",
            f"[color=\"{':'.join(colormap[x] for x in dt.flag_to_tuple(edge.depflag))}\""
            + label
            + "]",
        )

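The extra label only appears when the edge carries virtuals; the attribute string is otherwise unchanged. A standalone sketch of the string assembly with invented edge data:

colormap = {"build": "dodgerblue", "link": "crimson", "run": "goldenrod"}
deptypes = ("link",)  # stand-in for dt.flag_to_tuple(edge.depflag)
virtuals = ("mpi",)   # stand-in for edge.virtuals

label = f" xlabel=\"virtuals={','.join(virtuals)}\"" if virtuals else ""
attrs = f"[color=\"{':'.join(colormap[x] for x in deptypes)}\"" + label + "]"
print(attrs)  # [color="crimson" xlabel="virtuals=mpi"]
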
@@ -11,7 +11,6 @@

def _for_each_enabled(spec, method_name, explicit=None):
    """Calls a method for each enabled module"""
    spack.modules.ensure_modules_are_enabled_or_warn()
    set_names = set(spack.config.get("modules", {}).keys())
    for name in set_names:
        enabled = spack.config.get("modules:%s:enable" % name)

@@ -1039,7 +1039,7 @@ def test_pkg_id(cls, spec):
        Returns:
            str: the install test package identifier
        """
        return spec.format("{name}-{version}-{hash:7}")
        return spec.format_path("{name}-{version}-{hash:7}")

    @classmethod
    def test_log_name(cls, spec):

@@ -131,12 +131,12 @@ def set_term_title(self, text: str):
        if not sys.stdout.isatty():
            return

        status = "{0} {1}".format(text, self.get_progress())
        sys.stdout.write("\033]0;Spack: {0}\007".format(status))
        status = f"{text} {self.get_progress()}"
        sys.stdout.write(f"\x1b]0;Spack: {status}\x07")
        sys.stdout.flush()

    def get_progress(self) -> str:
        return "[{0}/{1}]".format(self.pkg_num, self.pkg_count)
        return f"[{self.pkg_num}/{self.pkg_count}]"


class TermStatusLine:
@@ -175,7 +175,7 @@ def clear(self):

        # Move the cursor to the beginning of the first "Waiting for" message and clear
        # everything after it.
        sys.stdout.write("\x1b[%sF\x1b[J" % lines)
        sys.stdout.write(f"\x1b[{lines}F\x1b[J")
        sys.stdout.flush()


@@ -220,14 +220,13 @@ def _handle_external_and_upstream(pkg: "spack.package_base.PackageBase", explici
    # consists in module file generation and registration in the DB.
    if pkg.spec.external:
        _process_external_package(pkg, explicit)
        _print_installed_pkg("{0} (external {1})".format(pkg.prefix, package_id(pkg)))
        _print_installed_pkg(f"{pkg.prefix} (external {package_id(pkg)})")
        return True

    if pkg.spec.installed_upstream:
        tty.verbose(
            "{0} is installed in an upstream Spack instance at {1}".format(
                package_id(pkg), pkg.spec.prefix
            )
            f"{package_id(pkg)} is installed in an upstream Spack instance at "
            f"{pkg.spec.prefix}"
        )
        _print_installed_pkg(pkg.prefix)

@@ -296,7 +295,7 @@ def _packages_needed_to_bootstrap_compiler(
        package is the bootstrap compiler (``True``) or one of its dependencies
        (``False``). The list will be empty if there are no compilers.
    """
    tty.debug("Bootstrapping {0} compiler".format(compiler))
    tty.debug(f"Bootstrapping {compiler} compiler")
    compilers = spack.compilers.compilers_for_spec(compiler, arch_spec=architecture)
    if compilers:
        return []
@@ -305,9 +304,9 @@ def _packages_needed_to_bootstrap_compiler(

    # Set the architecture for the compiler package in a way that allows the
    # concretizer to back off if needed for the older bootstrapping compiler
    dep.constrain("platform=%s" % str(architecture.platform))
    dep.constrain("os=%s" % str(architecture.os))
    dep.constrain("target=%s:" % architecture.target.microarchitecture.family.name)
    dep.constrain(f"platform={str(architecture.platform)}")
    dep.constrain(f"os={str(architecture.os)}")
    dep.constrain(f"target={architecture.target.microarchitecture.family.name}:")
    # concrete CompilerSpec has less info than concrete Spec
    # concretize as Spec to add that information
    dep.concretize()
@@ -340,15 +339,15 @@ def _hms(seconds: int) -> str:
    if m:
        parts.append("%dm" % m)
    if s:
        parts.append("%.2fs" % s)
        parts.append(f"{s:.2f}s")
    return " ".join(parts)

def _log_prefix(pkg_name) -> str:
    """Prefix of the form "[pid]: [pkg name]: ..." when printing a status update during
    the build."""
    pid = "{0}: ".format(os.getpid()) if tty.show_pid() else ""
    return "{0}{1}:".format(pid, pkg_name)
    pid = f"{os.getpid()}: " if tty.show_pid() else ""
    return f"{pid}{pkg_name}:"


def _print_installed_pkg(message: str) -> None:
@@ -375,9 +374,9 @@ def print_install_test_log(pkg: "spack.package_base.PackageBase") -> None:


def _print_timer(pre: str, pkg_id: str, timer: timer.BaseTimer) -> None:
    phases = ["{}: {}.".format(p.capitalize(), _hms(timer.duration(p))) for p in timer.phases]
    phases.append("Total: {}".format(_hms(timer.duration())))
    tty.msg("{0} Successfully installed {1}".format(pre, pkg_id), " ".join(phases))
    phases = [f"{p.capitalize()}: {_hms(timer.duration(p))}." for p in timer.phases]
    phases.append(f"Total: {_hms(timer.duration())}")
    tty.msg(f"{pre} Successfully installed {pkg_id}", " ".join(phases))

def _install_from_cache(
@@ -402,14 +401,14 @@ def _install_from_cache(
    )
    pkg_id = package_id(pkg)
    if not installed_from_cache:
        pre = "No binary for {0} found".format(pkg_id)
        pre = f"No binary for {pkg_id} found"
        if cache_only:
            tty.die("{0} when cache-only specified".format(pre))
            tty.die(f"{pre} when cache-only specified")

        tty.msg("{0}: installing from source".format(pre))
        tty.msg(f"{pre}: installing from source")
        return False
    t.stop()
    tty.debug("Successfully extracted {0} from binary cache".format(pkg_id))
    tty.debug(f"Successfully extracted {pkg_id} from binary cache")

    _write_timer_json(pkg, t, True)
    _print_timer(pre=_log_prefix(pkg.name), pkg_id=pkg_id, timer=t)
@@ -430,19 +429,19 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b
    """
    assert pkg.spec.external, "Expected to post-install/register an external package."

    pre = "{s.name}@{s.version} :".format(s=pkg.spec)
    pre = f"{pkg.spec.name}@{pkg.spec.version} :"
    spec = pkg.spec

    if spec.external_modules:
        tty.msg("{0} has external module in {1}".format(pre, spec.external_modules))
        tty.debug("{0} is actually installed in {1}".format(pre, spec.external_path))
        tty.msg(f"{pre} has external module in {spec.external_modules}")
        tty.debug(f"{pre} is actually installed in {spec.external_path}")
    else:
        tty.debug("{0} externally installed in {1}".format(pre, spec.external_path))
        tty.debug(f"{pre} externally installed in {spec.external_path}")

    try:
        # Check if the package was already registered in the DB.
        # If this is the case, then only make explicit if required.
        tty.debug("{0} already registered in DB".format(pre))
        tty.debug(f"{pre} already registered in DB")
        record = spack.store.STORE.db.get_record(spec)
        if explicit and not record.explicit:
            spack.store.STORE.db.update_explicit(spec, explicit)
@@ -451,11 +450,11 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b
        # If not, register it and generate the module file.
        # For external packages we just need to run
        # post-install hooks to generate module files.
        tty.debug("{0} generating module file".format(pre))
        tty.debug(f"{pre} generating module file")
        spack.hooks.post_install(spec, explicit)

        # Add to the DB
        tty.debug("{0} registering into DB".format(pre))
        tty.debug(f"{pre} registering into DB")
        spack.store.STORE.db.add(spec, None, explicit=explicit)


@@ -490,7 +489,7 @@ def _process_binary_cache_tarball(
    if download_result is None:
        return False

    tty.msg("Extracting {0} from binary cache".format(package_id(pkg)))
    tty.msg(f"Extracting {package_id(pkg)} from binary cache")

    with timer.measure("install"), spack.util.path.filter_padding():
        binary_distribution.extract_tarball(
@@ -522,7 +521,7 @@ def _try_install_from_binary_cache(
    if not spack.mirror.MirrorCollection(binary=True):
        return False

    tty.debug("Searching for binary cache of {0}".format(package_id(pkg)))
    tty.debug(f"Searching for binary cache of {package_id(pkg)}")

    with timer.measure("search"):
        matches = binary_distribution.get_mirrors_for_spec(pkg.spec, index_only=True)
@@ -590,9 +589,9 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
            source_repo = spack.repo.Repo(source_repo_root)
            source_pkg_dir = source_repo.dirname_for_package_name(node.name)
        except spack.repo.RepoError as err:
            tty.debug("Failed to create source repo for {0}: {1}".format(node.name, str(err)))
            tty.debug(f"Failed to create source repo for {node.name}: {str(err)}")
            source_pkg_dir = None
            tty.warn("Warning: Couldn't copy in provenance for {0}".format(node.name))
            tty.warn(f"Warning: Couldn't copy in provenance for {node.name}")

        # Create a destination repository
        dest_repo_root = os.path.join(path, node.namespace)
@@ -632,7 +631,7 @@ def install_msg(name: str, pid: int, install_status: InstallStatus) -> str:

    Return: Colorized installing message
    """
    pre = "{0}: ".format(pid) if tty.show_pid() else ""
    pre = f"{pid}: " if tty.show_pid() else ""
    post = (
        " @*{%s}" % install_status.get_progress()
        if install_status and spack.config.get("config:install_status", True)
@@ -698,7 +697,7 @@ def log(pkg: "spack.package_base.PackageBase") -> None:
        # in the stage tree (not arbitrary files)
        abs_expr = os.path.realpath(glob_expr)
        if os.path.realpath(pkg.stage.path) not in abs_expr:
            errors.write("[OUTSIDE SOURCE PATH]: {0}\n".format(glob_expr))
            errors.write(f"[OUTSIDE SOURCE PATH]: {glob_expr}\n")
            continue
        # Now that we are sure that the path is within the correct
        # folder, make it relative and check for matches
@@ -718,14 +717,14 @@ def log(pkg: "spack.package_base.PackageBase") -> None:
                # Here try to be conservative, and avoid discarding
                # the whole install procedure because copying a
                # single file failed
                errors.write("[FAILED TO ARCHIVE]: {0}".format(f))
                errors.write(f"[FAILED TO ARCHIVE]: {f}")

    if errors.getvalue():
        error_file = os.path.join(target_dir, "errors.txt")
        fs.mkdirp(target_dir)
        with open(error_file, "w") as err:
            err.write(errors.getvalue())
        tty.warn("Errors occurred when archiving files.\n\t" "See: {0}".format(error_file))
        tty.warn(f"Errors occurred when archiving files.\n\tSee: {error_file}")

    dump_packages(pkg.spec, packages_dir)

@@ -761,11 +760,11 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
        """
        # Ensure dealing with a package that has a concrete spec
        if not isinstance(pkg, spack.package_base.PackageBase):
            raise ValueError("{0} must be a package".format(str(pkg)))
            raise ValueError(f"{str(pkg)} must be a package")

        self.pkg = pkg
        if not self.pkg.spec.concrete:
            raise ValueError("{0} must have a concrete spec".format(self.pkg.name))
            raise ValueError(f"{self.pkg.name} must have a concrete spec")

        # Cache the package phase options with the explicit package,
        # popping the options to ensure installation of associated
@@ -797,14 +796,14 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):

    def __repr__(self) -> str:
        """Returns a formal representation of the build request."""
        rep = "{0}(".format(self.__class__.__name__)
        rep = f"{self.__class__.__name__}("
        for attr, value in self.__dict__.items():
            rep += "{0}={1}, ".format(attr, value.__repr__())
        return "{0})".format(rep.strip(", "))
            rep += f"{attr}={value.__repr__()}, "
        return f"{rep.strip(', ')})"

    def __str__(self) -> str:
        """Returns a printable version of the build request."""
        return "package={0}, install_args={1}".format(self.pkg.name, self.install_args)
        return f"package={self.pkg.name}, install_args={self.install_args}"

    def _add_default_args(self) -> None:
        """Ensure standard install options are set to at least the default."""
@@ -930,18 +929,18 @@ def __init__(

        # Ensure dealing with a package that has a concrete spec
        if not isinstance(pkg, spack.package_base.PackageBase):
            raise ValueError("{0} must be a package".format(str(pkg)))
            raise ValueError(f"{str(pkg)} must be a package")

        self.pkg = pkg
        if not self.pkg.spec.concrete:
            raise ValueError("{0} must have a concrete spec".format(self.pkg.name))
            raise ValueError(f"{self.pkg.name} must have a concrete spec")

        # The "unique" identifier for the task's package
        self.pkg_id = package_id(self.pkg)

        # The explicit build request associated with the package
        if not isinstance(request, BuildRequest):
            raise ValueError("{0} must have a build request".format(str(pkg)))
            raise ValueError(f"{str(pkg)} must have a build request")

        self.request = request

@@ -949,8 +948,9 @@ def __init__(
        # ensure priority queue invariants when tasks are "removed" from the
        # queue.
        if status == STATUS_REMOVED:
            msg = "Cannot create a build task for {0} with status '{1}'"
            raise InstallError(msg.format(self.pkg_id, status), pkg=pkg)
            raise InstallError(
                f"Cannot create a build task for {self.pkg_id} with status '{status}'", pkg=pkg
            )

        self.status = status

@@ -964,9 +964,9 @@ def __init__(
        # to support tracking of parallel, multi-spec, environment installs.
        self.dependents = set(get_dependent_ids(self.pkg.spec))

        tty.debug("Pkg id {0} has the following dependents:".format(self.pkg_id))
        tty.debug(f"Pkg id {self.pkg_id} has the following dependents:")
        for dep_id in self.dependents:
            tty.debug("- {0}".format(dep_id))
            tty.debug(f"- {dep_id}")

        # Set of dependencies
        #
@@ -988,9 +988,9 @@ def __init__(
        if not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec):
            # The compiler is in the queue, identify it as dependency
            dep = spack.compilers.pkg_spec_for_compiler(compiler_spec)
            dep.constrain("platform=%s" % str(arch_spec.platform))
            dep.constrain("os=%s" % str(arch_spec.os))
            dep.constrain("target=%s:" % arch_spec.target.microarchitecture.family.name)
            dep.constrain(f"platform={str(arch_spec.platform)}")
            dep.constrain(f"os={str(arch_spec.os)}")
            dep.constrain(f"target={arch_spec.target.microarchitecture.family.name}:")
            dep.concretize()
            dep_id = package_id(dep.package)
            self.dependencies.add(dep_id)
@@ -1026,14 +1026,14 @@ def __ne__(self, other):

    def __repr__(self) -> str:
        """Returns a formal representation of the build task."""
        rep = "{0}(".format(self.__class__.__name__)
        rep = f"{self.__class__.__name__}("
        for attr, value in self.__dict__.items():
            rep += "{0}={1}, ".format(attr, value.__repr__())
        return "{0})".format(rep.strip(", "))
            rep += f"{attr}={value.__repr__()}, "
        return f"{rep.strip(', ')})"

    def __str__(self) -> str:
        """Returns a printable version of the build task."""
        dependencies = "#dependencies={0}".format(len(self.dependencies))
        dependencies = f"#dependencies={len(self.dependencies)}"
        return "priority={0}, status={1}, start={2}, {3}".format(
            self.priority, self.status, self.start, dependencies
        )
@@ -1056,7 +1056,7 @@ def add_dependent(self, pkg_id: str) -> None:
            pkg_id: package identifier of the dependent package
        """
        if pkg_id != self.pkg_id and pkg_id not in self.dependents:
            tty.debug("Adding {0} as a dependent of {1}".format(pkg_id, self.pkg_id))
            tty.debug(f"Adding {pkg_id} as a dependent of {self.pkg_id}")
            self.dependents.add(pkg_id)

    def flag_installed(self, installed: List[str]) -> None:
@@ -1070,9 +1070,8 @@ def flag_installed(self, installed: List[str]) -> None:
        for pkg_id in now_installed:
            self.uninstalled_deps.remove(pkg_id)
            tty.debug(
                "{0}: Removed {1} from uninstalled deps list: {2}".format(
                    self.pkg_id, pkg_id, self.uninstalled_deps
                ),
                f"{self.pkg_id}: Removed {pkg_id} from uninstalled deps list: "
                f"{self.uninstalled_deps}",
                level=2,
            )

@@ -1170,18 +1169,18 @@ def __init__(self, installs: List[Tuple["spack.package_base.PackageBase", dict]]

    def __repr__(self) -> str:
        """Returns a formal representation of the package installer."""
        rep = "{0}(".format(self.__class__.__name__)
        rep = f"{self.__class__.__name__}("
        for attr, value in self.__dict__.items():
            rep += "{0}={1}, ".format(attr, value.__repr__())
        return "{0})".format(rep.strip(", "))
            rep += f"{attr}={value.__repr__()}, "
        return f"{rep.strip(', ')})"

    def __str__(self) -> str:
        """Returns a printable version of the package installer."""
        requests = "#requests={0}".format(len(self.build_requests))
        tasks = "#tasks={0}".format(len(self.build_tasks))
        failed = "failed ({0}) = {1}".format(len(self.failed), self.failed)
        installed = "installed ({0}) = {1}".format(len(self.installed), self.installed)
        return "{0}: {1}; {2}; {3}; {4}".format(self.pid, requests, tasks, installed, failed)
        requests = f"#requests={len(self.build_requests)}"
        tasks = f"#tasks={len(self.build_tasks)}"
        failed = f"failed ({len(self.failed)}) = {self.failed}"
        installed = f"installed ({len(self.installed)}) = {self.installed}"
        return f"{self.pid}: {requests}; {tasks}; {installed}; {failed}"

    def _add_bootstrap_compilers(
        self,
@@ -1226,9 +1225,7 @@ def _modify_existing_task(self, pkgid: str, attr, value) -> None:
        for i, tup in enumerate(self.build_pq):
            key, task = tup
            if task.pkg_id == pkgid:
                tty.debug(
                    "Modifying task for {0} to treat it as a compiler".format(pkgid), level=2
                )
                tty.debug(f"Modifying task for {pkgid} to treat it as a compiler", level=2)
                setattr(task, attr, value)
                self.build_pq[i] = (key, task)

@@ -1293,7 +1290,7 @@ def _check_deps_status(self, request: BuildRequest) -> None:
            # Check for failure since a prefix lock is not required
            if spack.store.STORE.failure_tracker.has_failed(dep):
                action = "'spack install' the dependency"
                msg = "{0} is marked as an install failure: {1}".format(dep_id, action)
                msg = f"{dep_id} is marked as an install failure: {action}"
                raise InstallError(err.format(request.pkg_id, msg), pkg=dep_pkg)

            # Attempt to get a read lock to ensure another process does not
@@ -1301,7 +1298,7 @@ def _check_deps_status(self, request: BuildRequest) -> None:
            # installed
            ltype, lock = self._ensure_locked("read", dep_pkg)
            if lock is None:
                msg = "{0} is write locked by another process".format(dep_id)
                msg = f"{dep_id} is write locked by another process"
                raise InstallError(err.format(request.pkg_id, msg), pkg=request.pkg)

            # Flag external and upstream packages as being installed
@@ -1320,7 +1317,7 @@ def _check_deps_status(self, request: BuildRequest) -> None:
                or rec.installation_time > request.overwrite_time
            )
            ):
                tty.debug("Flagging {0} as installed per the database".format(dep_id))
                tty.debug(f"Flagging {dep_id} as installed per the database")
                self._flag_installed(dep_pkg)
            else:
                lock.release_read()
@@ -1356,9 +1353,9 @@ def _prepare_for_install(self, task: BuildTask) -> None:
        # Ensure there is no other installed spec with the same prefix dir
        if spack.store.STORE.db.is_occupied_install_prefix(task.pkg.spec.prefix):
            raise InstallError(
                "Install prefix collision for {0}".format(task.pkg_id),
                long_msg="Prefix directory {0} already used by another "
                "installed spec.".format(task.pkg.spec.prefix),
                f"Install prefix collision for {task.pkg_id}",
                long_msg=f"Prefix directory {task.pkg.spec.prefix} already "
                "used by another installed spec.",
                pkg=task.pkg,
            )

@@ -1368,7 +1365,7 @@ def _prepare_for_install(self, task: BuildTask) -> None:
        if not keep_prefix:
            task.pkg.remove_prefix()
        else:
            tty.debug("{0} is partially installed".format(task.pkg_id))
            tty.debug(f"{task.pkg_id} is partially installed")

        # Destroy the stage for a locally installed, non-DIYStage, package
        if restage and task.pkg.stage.managed_by_spack:
@@ -1413,9 +1410,8 @@ def _cleanup_failed(self, pkg_id: str) -> None:
        lock = self.failed.get(pkg_id, None)
        if lock is not None:
            err = "{0} exception when removing failure tracking for {1}: {2}"
            msg = "Removing failure mark on {0}"
            try:
                tty.verbose(msg.format(pkg_id))
                tty.verbose(f"Removing failure mark on {pkg_id}")
                lock.release_write()
            except Exception as exc:
                tty.warn(err.format(exc.__class__.__name__, pkg_id, str(exc)))
@@ -1442,19 +1438,19 @@ def _ensure_install_ready(self, pkg: "spack.package_base.PackageBase") -> None:
        pkg: the package being locally installed
        """
        pkg_id = package_id(pkg)
        pre = "{0} cannot be installed locally:".format(pkg_id)
        pre = f"{pkg_id} cannot be installed locally:"

        # External packages cannot be installed locally.
        if pkg.spec.external:
|
||||
raise ExternalPackageError("{0} {1}".format(pre, "is external"))
|
||||
raise ExternalPackageError(f"{pre} is external")
|
||||
|
||||
# Upstream packages cannot be installed locally.
|
||||
if pkg.spec.installed_upstream:
|
||||
raise UpstreamPackageError("{0} {1}".format(pre, "is upstream"))
|
||||
raise UpstreamPackageError(f"{pre} is upstream")
|
||||
|
||||
# The package must have a prefix lock at this stage.
|
||||
if pkg_id not in self.locks:
|
||||
raise InstallLockError("{0} {1}".format(pre, "not locked"))
|
||||
raise InstallLockError(f"{pre} not locked")
|
||||
|
||||
def _ensure_locked(
|
||||
self, lock_type: str, pkg: "spack.package_base.PackageBase"
|
||||
@@ -1481,14 +1477,14 @@ def _ensure_locked(
|
||||
assert lock_type in [
|
||||
"read",
|
||||
"write",
|
||||
], '"{0}" is not a supported package management lock type'.format(lock_type)
|
||||
], f'"{lock_type}" is not a supported package management lock type'
|
||||
|
||||
pkg_id = package_id(pkg)
|
||||
ltype, lock = self.locks.get(pkg_id, (lock_type, None))
|
||||
if lock and ltype == lock_type:
|
||||
return ltype, lock
|
||||
|
||||
desc = "{0} lock".format(lock_type)
|
||||
desc = f"{lock_type} lock"
|
||||
msg = "{0} a {1} on {2} with timeout {3}"
|
||||
err = "Failed to {0} a {1} for {2} due to {3}: {4}"
|
||||
|
||||
@@ -1507,11 +1503,7 @@ def _ensure_locked(
|
||||
op = "acquire"
|
||||
lock = spack.store.STORE.prefix_locker.lock(pkg.spec, timeout)
|
||||
if timeout != lock.default_timeout:
|
||||
tty.warn(
|
||||
"Expected prefix lock timeout {0}, not {1}".format(
|
||||
timeout, lock.default_timeout
|
||||
)
|
||||
)
|
||||
tty.warn(f"Expected prefix lock timeout {timeout}, not {lock.default_timeout}")
|
||||
if lock_type == "read":
|
||||
lock.acquire_read()
|
||||
else:
|
||||
@@ -1536,7 +1528,7 @@ def _ensure_locked(
|
||||
tty.debug(msg.format("Upgrading to", desc, pkg_id, pretty_seconds(timeout or 0)))
|
||||
op = "upgrade to"
|
||||
lock.upgrade_read_to_write(timeout)
|
||||
tty.debug("{0} is now {1} locked".format(pkg_id, lock_type))
|
||||
tty.debug(f"{pkg_id} is now {lock_type} locked")
|
||||
|
||||
except (lk.LockDowngradeError, lk.LockTimeoutError) as exc:
|
||||
tty.debug(err.format(op, desc, pkg_id, exc.__class__.__name__, str(exc)))
|
||||
@@ -1561,14 +1553,14 @@ def _add_tasks(self, request: BuildRequest, all_deps):
|
||||
all_deps (defaultdict(set)): dictionary of all dependencies and
|
||||
associated dependents
|
||||
"""
|
||||
tty.debug("Initializing the build queue for {0}".format(request.pkg.name))
|
||||
tty.debug(f"Initializing the build queue for {request.pkg.name}")
|
||||
|
||||
# Ensure not attempting to perform an installation when user didn't
|
||||
# want to go that far for the requested package.
|
||||
try:
|
||||
_check_last_phase(request.pkg)
|
||||
except BadInstallPhase as err:
|
||||
tty.warn("Installation request refused: {0}".format(str(err)))
|
||||
tty.warn(f"Installation request refused: {str(err)}")
|
||||
return
|
||||
|
||||
# Skip out early if the spec is not being installed locally (i.e., if
|
||||
@@ -1719,9 +1711,9 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
|
||||
# A StopPhase exception means that do_install was asked to
|
||||
# stop early from clients, and is not an error at this point
|
||||
spack.hooks.on_install_failure(task.request.pkg.spec)
|
||||
pid = "{0}: ".format(self.pid) if tty.show_pid() else ""
|
||||
tty.debug("{0}{1}".format(pid, str(e)))
|
||||
tty.debug("Package stage directory: {0}".format(pkg.stage.source_path))
|
||||
pid = f"{self.pid}: " if tty.show_pid() else ""
|
||||
tty.debug(f"{pid}{str(e)}")
|
||||
tty.debug(f"Package stage directory: {pkg.stage.source_path}")
|
||||
|
||||
def _next_is_pri0(self) -> bool:
|
||||
"""
|
||||
@@ -1816,7 +1808,7 @@ def _remove_task(self, pkg_id: str) -> Optional[BuildTask]:
|
||||
pkg_id: identifier for the package to be removed
|
||||
"""
|
||||
if pkg_id in self.build_tasks:
|
||||
tty.debug("Removing build task for {0} from list".format(pkg_id))
|
||||
tty.debug(f"Removing build task for {pkg_id} from list")
|
||||
task = self.build_tasks.pop(pkg_id)
|
||||
task.status = STATUS_REMOVED
|
||||
return task
|
||||
@@ -1832,10 +1824,8 @@ def _requeue_task(self, task: BuildTask, install_status: InstallStatus) -> None:
|
||||
"""
|
||||
if task.status not in [STATUS_INSTALLED, STATUS_INSTALLING]:
|
||||
tty.debug(
|
||||
"{0} {1}".format(
|
||||
install_msg(task.pkg_id, self.pid, install_status),
|
||||
"in progress by another process",
|
||||
)
|
||||
f"{install_msg(task.pkg_id, self.pid, install_status)} "
|
||||
"in progress by another process"
|
||||
)
|
||||
|
||||
new_task = task.next_attempt(self.installed)
|
||||
@@ -1852,7 +1842,7 @@ def _setup_install_dir(self, pkg: "spack.package_base.PackageBase") -> None:
|
||||
"""
|
||||
if not os.path.exists(pkg.spec.prefix):
|
||||
path = spack.util.path.debug_padded_filter(pkg.spec.prefix)
|
||||
tty.debug("Creating the installation directory {0}".format(path))
|
||||
tty.debug(f"Creating the installation directory {path}")
|
||||
spack.store.STORE.layout.create_install_directory(pkg.spec)
|
||||
else:
|
||||
# Set the proper group for the prefix
|
||||
@@ -1888,8 +1878,8 @@ def _update_failed(
|
||||
exc: optional exception if associated with the failure
|
||||
"""
|
||||
pkg_id = task.pkg_id
|
||||
err = "" if exc is None else ": {0}".format(str(exc))
|
||||
tty.debug("Flagging {0} as failed{1}".format(pkg_id, err))
|
||||
err = "" if exc is None else f": {str(exc)}"
|
||||
tty.debug(f"Flagging {pkg_id} as failed{err}")
|
||||
if mark:
|
||||
self.failed[pkg_id] = spack.store.STORE.failure_tracker.mark(task.pkg.spec)
|
||||
else:
|
||||
@@ -1898,14 +1888,14 @@ def _update_failed(
|
||||
|
||||
for dep_id in task.dependents:
|
||||
if dep_id in self.build_tasks:
|
||||
tty.warn("Skipping build of {0} since {1} failed".format(dep_id, pkg_id))
|
||||
tty.warn(f"Skipping build of {dep_id} since {pkg_id} failed")
|
||||
# Ensure the dependent's uninstalled dependents are
|
||||
# up-to-date and their build tasks removed.
|
||||
dep_task = self.build_tasks[dep_id]
|
||||
self._update_failed(dep_task, mark)
|
||||
self._remove_task(dep_id)
|
||||
else:
|
||||
tty.debug("No build task for {0} to skip since {1} failed".format(dep_id, pkg_id))
|
||||
tty.debug(f"No build task for {dep_id} to skip since {pkg_id} failed")
|
||||
|
||||
def _update_installed(self, task: BuildTask) -> None:
|
||||
"""
|
||||
@@ -1935,23 +1925,21 @@ def _flag_installed(
|
||||
# Already determined the package has been installed
|
||||
return
|
||||
|
||||
tty.debug("Flagging {0} as installed".format(pkg_id))
|
||||
tty.debug(f"Flagging {pkg_id} as installed")
|
||||
|
||||
self.installed.add(pkg_id)
|
||||
|
||||
# Update affected dependents
|
||||
dependent_ids = dependent_ids or get_dependent_ids(pkg.spec)
|
||||
for dep_id in set(dependent_ids):
|
||||
tty.debug("Removing {0} from {1}'s uninstalled dependencies.".format(pkg_id, dep_id))
|
||||
tty.debug(f"Removing {pkg_id} from {dep_id}'s uninstalled dependencies.")
|
||||
if dep_id in self.build_tasks:
|
||||
# Ensure the dependent's uninstalled dependencies are
|
||||
# up-to-date. This will require requeueing the task.
|
||||
dep_task = self.build_tasks[dep_id]
|
||||
self._push_task(dep_task.next_attempt(self.installed))
|
||||
else:
|
||||
tty.debug(
|
||||
"{0} has no build task to update for {1}'s success".format(dep_id, pkg_id)
|
||||
)
|
||||
tty.debug(f"{dep_id} has no build task to update for {pkg_id}'s success")
|
||||
|
||||
def _init_queue(self) -> None:
|
||||
"""Initialize the build queue from the list of build requests."""
|
||||
@@ -2032,8 +2020,8 @@ def install(self) -> None:
|
||||
|
||||
pkg, pkg_id, spec = task.pkg, task.pkg_id, task.pkg.spec
|
||||
install_status.next_pkg(pkg)
|
||||
install_status.set_term_title("Processing {0}".format(pkg.name))
|
||||
tty.debug("Processing {0}: task={1}".format(pkg_id, task))
|
||||
install_status.set_term_title(f"Processing {pkg.name}")
|
||||
tty.debug(f"Processing {pkg_id}: task={task}")
|
||||
# Ensure that the current spec has NO uninstalled dependencies,
|
||||
# which is assumed to be reflected directly in its priority.
|
||||
#
|
||||
@@ -2045,24 +2033,19 @@ def install(self) -> None:
|
||||
if task.priority != 0:
|
||||
term_status.clear()
|
||||
tty.error(
|
||||
"Detected uninstalled dependencies for {0}: {1}".format(
|
||||
pkg_id, task.uninstalled_deps
|
||||
)
|
||||
f"Detected uninstalled dependencies for {pkg_id}: " f"{task.uninstalled_deps}"
|
||||
)
|
||||
left = [dep_id for dep_id in task.uninstalled_deps if dep_id not in self.installed]
|
||||
if not left:
|
||||
tty.warn(
|
||||
"{0} does NOT actually have any uninstalled deps" " left".format(pkg_id)
|
||||
)
|
||||
tty.warn(f"{pkg_id} does NOT actually have any uninstalled deps left")
|
||||
dep_str = "dependencies" if task.priority > 1 else "dependency"
|
||||
|
||||
# Hook to indicate task failure, but without an exception
|
||||
spack.hooks.on_install_failure(task.request.pkg.spec)
|
||||
|
||||
raise InstallError(
|
||||
"Cannot proceed with {0}: {1} uninstalled {2}: {3}".format(
|
||||
pkg_id, task.priority, dep_str, ",".join(task.uninstalled_deps)
|
||||
),
|
||||
f"Cannot proceed with {pkg_id}: {task.priority} uninstalled "
|
||||
f"{dep_str}: {','.join(task.uninstalled_deps)}",
|
||||
pkg=pkg,
|
||||
)
|
||||
|
||||
@@ -2079,7 +2062,7 @@ def install(self) -> None:
|
||||
# assume using a separate (failed) prefix lock file.
|
||||
if pkg_id in self.failed or spack.store.STORE.failure_tracker.has_failed(spec):
|
||||
term_status.clear()
|
||||
tty.warn("{0} failed to install".format(pkg_id))
|
||||
tty.warn(f"{pkg_id} failed to install")
|
||||
self._update_failed(task)
|
||||
|
||||
# Mark that the package failed
|
||||
@@ -2096,7 +2079,7 @@ def install(self) -> None:
|
||||
# another process is likely (un)installing the spec or has
|
||||
# determined the spec has already been installed (though the
|
||||
# other process may be hung).
|
||||
install_status.set_term_title("Acquiring lock for {0}".format(pkg.name))
|
||||
install_status.set_term_title(f"Acquiring lock for {pkg.name}")
|
||||
term_status.add(pkg_id)
|
||||
ltype, lock = self._ensure_locked("write", pkg)
|
||||
if lock is None:
|
||||
@@ -2119,7 +2102,7 @@ def install(self) -> None:
|
||||
task.request.overwrite_time = time.time()
|
||||
|
||||
# Determine state of installation artifacts and adjust accordingly.
|
||||
install_status.set_term_title("Preparing {0}".format(pkg.name))
|
||||
install_status.set_term_title(f"Preparing {pkg.name}")
|
||||
self._prepare_for_install(task)
|
||||
|
||||
# Flag an already installed package
|
||||
@@ -2165,7 +2148,7 @@ def install(self) -> None:
|
||||
|
||||
# Proceed with the installation since we have an exclusive write
|
||||
# lock on the package.
|
||||
install_status.set_term_title("Installing {0}".format(pkg.name))
|
||||
install_status.set_term_title(f"Installing {pkg.name}")
|
||||
try:
|
||||
action = self._install_action(task)
|
||||
|
||||
@@ -2186,8 +2169,9 @@ def install(self) -> None:
|
||||
except KeyboardInterrupt as exc:
|
||||
# The build has been terminated with a Ctrl-C so terminate
|
||||
# regardless of the number of remaining specs.
|
||||
err = "Failed to install {0} due to {1}: {2}"
|
||||
tty.error(err.format(pkg.name, exc.__class__.__name__, str(exc)))
|
||||
tty.error(
|
||||
f"Failed to install {pkg.name} due to " f"{exc.__class__.__name__}: {str(exc)}"
|
||||
)
|
||||
spack.hooks.on_install_cancel(task.request.pkg.spec)
|
||||
raise
|
||||
|
||||
@@ -2196,9 +2180,10 @@ def install(self) -> None:
|
||||
raise
|
||||
|
||||
# Checking hash on downloaded binary failed.
|
||||
err = "Failed to install {0} from binary cache due to {1}:"
|
||||
err += " Requeueing to install from source."
|
||||
tty.error(err.format(pkg.name, str(exc)))
|
||||
tty.error(
|
||||
f"Failed to install {pkg.name} from binary cache due "
|
||||
f"to {str(exc)}: Requeueing to install from source."
|
||||
)
|
||||
# this overrides a full method, which is ugly.
|
||||
task.use_cache = False # type: ignore[misc]
|
||||
self._requeue_task(task, install_status)
|
||||
@@ -2216,13 +2201,12 @@ def install(self) -> None:
|
||||
# lower levels -- skip printing if already printed.
|
||||
# TODO: sort out this and SpackError.print_context()
|
||||
tty.error(
|
||||
"Failed to install {0} due to {1}: {2}".format(
|
||||
pkg.name, exc.__class__.__name__, str(exc)
|
||||
)
|
||||
f"Failed to install {pkg.name} due to "
|
||||
f"{exc.__class__.__name__}: {str(exc)}"
|
||||
)
|
||||
# Terminate if requested to do so on the first failure.
|
||||
if self.fail_fast:
|
||||
raise InstallError("{0}: {1}".format(fail_fast_err, str(exc)), pkg=pkg)
|
||||
raise InstallError(f"{fail_fast_err}: {str(exc)}", pkg=pkg)
|
||||
|
||||
# Terminate at this point if the single explicit spec has
|
||||
# failed to install.
|
||||
@@ -2261,17 +2245,17 @@ def install(self) -> None:
|
||||
|
||||
if failed_explicits or missing:
|
||||
for _, pkg_id, err in failed_explicits:
|
||||
tty.error("{0}: {1}".format(pkg_id, err))
|
||||
tty.error(f"{pkg_id}: {err}")
|
||||
|
||||
for _, pkg_id in missing:
|
||||
tty.error("{0}: Package was not installed".format(pkg_id))
|
||||
tty.error(f"{pkg_id}: Package was not installed")
|
||||
|
||||
if len(failed_explicits) > 0:
|
||||
pkg = failed_explicits[0][0]
|
||||
ids = [pkg_id for _, pkg_id, _ in failed_explicits]
|
||||
tty.debug(
|
||||
"Associating installation failure with first failed "
|
||||
"explicit package ({0}) from {1}".format(ids[0], ", ".join(ids))
|
||||
f"explicit package ({ids[0]}) from {', '.join(ids)}"
|
||||
)
|
||||
|
||||
elif len(missing) > 0:
|
||||
@@ -2279,7 +2263,7 @@ def install(self) -> None:
|
||||
ids = [pkg_id for _, pkg_id in missing]
|
||||
tty.debug(
|
||||
"Associating installation failure with first "
|
||||
"missing package ({0}) from {1}".format(ids[0], ", ".join(ids))
|
||||
f"missing package ({ids[0]}) from {', '.join(ids)}"
|
||||
)
|
||||
|
||||
raise InstallError(
|
||||
@@ -2357,7 +2341,7 @@ def run(self) -> bool:
|
||||
self.timer.stop("stage")
|
||||
|
||||
tty.debug(
|
||||
"{0} Building {1} [{2}]".format(self.pre, self.pkg_id, self.pkg.build_system_class) # type: ignore[attr-defined] # noqa: E501
|
||||
f"{self.pre} Building {self.pkg_id} [{self.pkg.build_system_class}]" # type: ignore[attr-defined] # noqa: E501
|
||||
)
|
||||
|
||||
# get verbosity from do_install() parameter or saved value
|
||||
@@ -2402,7 +2386,7 @@ def _install_source(self) -> None:
|
||||
return
|
||||
|
||||
src_target = os.path.join(pkg.spec.prefix, "share", pkg.name, "src")
|
||||
tty.debug("{0} Copying source to {1}".format(self.pre, src_target))
|
||||
tty.debug(f"{self.pre} Copying source to {src_target}")
|
||||
|
||||
fs.install_tree(
|
||||
pkg.stage.source_path, src_target, allow_broken_symlinks=(sys.platform != "win32")
|
||||
@@ -2464,8 +2448,7 @@ def _real_install(self) -> None:
|
||||
with logger.force_echo():
|
||||
inner_debug_level = tty.debug_level()
|
||||
tty.set_debug(debug_level)
|
||||
msg = "{0} Executing phase: '{1}'"
|
||||
tty.msg(msg.format(self.pre, phase_fn.name))
|
||||
tty.msg(f"{self.pre} Executing phase: '{phase_fn.name}'")
|
||||
tty.set_debug(inner_debug_level)
|
||||
|
||||
# Catch any errors to report to logging
|
||||
@@ -2539,12 +2522,9 @@ def install(self):
|
||||
except fs.CouldNotRestoreDirectoryBackup as e:
|
||||
self.database.remove(self.task.pkg.spec)
|
||||
tty.error(
|
||||
"Recovery of install dir of {0} failed due to "
|
||||
"{1}: {2}. The spec is now uninstalled.".format(
|
||||
self.task.pkg.name,
|
||||
e.outer_exception.__class__.__name__,
|
||||
str(e.outer_exception),
|
||||
)
|
||||
f"Recovery of install dir of {self.task.pkg.name} failed due to "
|
||||
f"{e.outer_exception.__class__.__name__}: {str(e.outer_exception)}. "
|
||||
"The spec is now uninstalled."
|
||||
)
|
||||
|
||||
# Unwrap the actual installation exception.
|
||||
@@ -2567,7 +2547,7 @@ class BadInstallPhase(InstallError):
"""Raised when an install phase option is not allowed for a package."""

def __init__(self, pkg_name, phase):
super().__init__("'{0}' is not a valid phase for package {1}".format(phase, pkg_name))
super().__init__(f"'{phase}' is not a valid phase for package {pkg_name}")


class ExternalPackageError(InstallError):

@@ -18,7 +18,7 @@
import sys
import traceback
import urllib.parse
from typing import Optional, Union
from typing import List, Optional, Union

import llnl.url
import llnl.util.tty as tty
@@ -27,18 +27,18 @@
import spack.caches
import spack.config
import spack.error
import spack.fetch_strategy as fs
import spack.fetch_strategy
import spack.mirror
import spack.oci.image
import spack.spec
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
from spack.util.spack_yaml import syaml_dict
from spack.version import VersionList
import spack.version

#: What schemes do we support
supported_url_schemes = ("file", "http", "https", "sftp", "ftp", "s3", "gs")
supported_url_schemes = ("file", "http", "https", "sftp", "ftp", "s3", "gs", "oci")


def _url_or_path_to_url(url_or_path: str) -> str:
@@ -230,12 +230,12 @@ def _get_value(self, attribute: str, direction: str):
value = self._data.get(direction, {})

# Return top-level entry if only a URL was set.
if isinstance(value, str):
return self._data.get(attribute, None)
if isinstance(value, str) or attribute not in value:
return self._data.get(attribute)

return self._data.get(direction, {}).get(attribute, None)
return value[attribute]
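Aside: the rewritten _get_value prefers a per-direction ("fetch"/"push") entry and falls back to the top-level one, including when the direction is configured as a bare URL string. A self-contained sketch with a plain dict shaped like Mirror._data (all values invented):

    _data = {
        "access_token": "top-level-token",
        "fetch": {"access_token": "fetch-only-token"},
        "push": "https://example.com/push-mirror",  # bare URL, no per-direction attributes
    }

    def get_value(attribute, direction):
        value = _data.get(direction, {})
        # Fall back to the top-level entry if the direction holds a bare URL
        # or does not override this attribute.
        if isinstance(value, str) or attribute not in value:
            return _data.get(attribute)
        return value[attribute]

    assert get_value("access_token", "fetch") == "fetch-only-token"
    assert get_value("access_token", "push") == "top-level-token"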

def get_url(self, direction: str):
def get_url(self, direction: str) -> str:
if direction not in ("fetch", "push"):
raise ValueError(f"direction must be either 'fetch' or 'push', not {direction}")

@@ -255,18 +255,21 @@ def get_url(self, direction: str):
elif "url" in info:
url = info["url"]

return _url_or_path_to_url(url) if url else None
if not url:
raise ValueError(f"Mirror {self.name} has no URL configured")

def get_access_token(self, direction: str):
return _url_or_path_to_url(url)

def get_access_token(self, direction: str) -> Optional[str]:
return self._get_value("access_token", direction)

def get_access_pair(self, direction: str):
def get_access_pair(self, direction: str) -> Optional[List]:
return self._get_value("access_pair", direction)

def get_profile(self, direction: str):
def get_profile(self, direction: str) -> Optional[str]:
return self._get_value("profile", direction)

def get_endpoint_url(self, direction: str):
def get_endpoint_url(self, direction: str) -> Optional[str]:
return self._get_value("endpoint_url", direction)


@@ -330,7 +333,7 @@ def from_json(stream, name=None):
raise sjson.SpackJSONError("error parsing JSON mirror collection:", str(e)) from e

def to_dict(self, recursive=False):
return syaml_dict(
return syaml.syaml_dict(
sorted(
((k, (v.to_dict() if recursive else v)) for (k, v) in self._mirrors.items()),
key=operator.itemgetter(0),
@@ -372,7 +375,7 @@ def __len__(self):


def _determine_extension(fetcher):
if isinstance(fetcher, fs.URLFetchStrategy):
if isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy):
if fetcher.expand_archive:
# If we fetch with a URLFetchStrategy, use URL's archive type
ext = llnl.url.determine_url_file_extension(fetcher.url)
@@ -437,6 +440,19 @@ def __iter__(self):
yield self.cosmetic_path


class OCIImageLayout:
"""Follow the OCI Image Layout Specification to archive blobs

Paths are of the form `blobs/<algorithm>/<digest>`
"""

def __init__(self, digest: spack.oci.image.Digest) -> None:
self.storage_path = os.path.join("blobs", digest.algorithm, digest.digest)

def __iter__(self):
yield self.storage_path
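Aside: per the OCI Image Layout Specification, a blob's relative storage path is derived purely from its digest. A quick sketch (dummy digest; assumes OCIImageLayout and spack.oci.image are importable together):

    from spack.oci.image import Digest

    digest = Digest.from_sha256("0" * 64)  # dummy checksum for illustration
    layout = OCIImageLayout(digest)
    print(layout.storage_path)  # blobs/sha256/000...000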


def mirror_archive_paths(fetcher, per_package_ref, spec=None):
"""Returns a ``MirrorReference`` object which keeps track of the relative
storage path of the resource associated with the specified ``fetcher``."""
@@ -482,7 +498,7 @@ def get_all_versions(specs):

for version in pkg_cls.versions:
version_spec = spack.spec.Spec(pkg_cls.name)
version_spec.versions = VersionList([version])
version_spec.versions = spack.version.VersionList([version])
version_specs.append(version_spec)

return version_specs
@@ -521,7 +537,7 @@ def get_matching_versions(specs, num_versions=1):
# Generate only versions that satisfy the spec.
if spec.concrete or v.intersects(spec.versions):
s = spack.spec.Spec(pkg.name)
s.versions = VersionList([v])
s.versions = spack.version.VersionList([v])
s.variants = spec.variants.copy()
# This is needed to avoid hanging references during the
# concretization phase
@@ -591,14 +607,14 @@ def add(mirror: Mirror, scope=None):
"""Add a named mirror in the given scope"""
mirrors = spack.config.get("mirrors", scope=scope)
if not mirrors:
mirrors = syaml_dict()
mirrors = syaml.syaml_dict()

if mirror.name in mirrors:
tty.die("Mirror with name {} already exists.".format(mirror.name))

items = [(n, u) for n, u in mirrors.items()]
items.insert(0, (mirror.name, mirror.to_dict()))
mirrors = syaml_dict(items)
mirrors = syaml.syaml_dict(items)
spack.config.set("mirrors", mirrors, scope=scope)


@@ -606,7 +622,7 @@ def remove(name, scope):
"""Remove the named mirror in the given scope"""
mirrors = spack.config.get("mirrors", scope=scope)
if not mirrors:
mirrors = syaml_dict()
mirrors = syaml.syaml_dict()

if name not in mirrors:
tty.die("No mirror with name %s" % name)

@@ -7,15 +7,10 @@
include Tcl non-hierarchical modules, Lua hierarchical modules, and others.
"""

from .common import disable_modules, ensure_modules_are_enabled_or_warn
from .common import disable_modules
from .lmod import LmodModulefileWriter
from .tcl import TclModulefileWriter

__all__ = [
"TclModulefileWriter",
"LmodModulefileWriter",
"disable_modules",
"ensure_modules_are_enabled_or_warn",
]
__all__ = ["TclModulefileWriter", "LmodModulefileWriter", "disable_modules"]

module_types = {"tcl": TclModulefileWriter, "lmod": LmodModulefileWriter}

@@ -33,10 +33,8 @@
import datetime
import inspect
import os.path
import pathlib
import re
import string
import warnings
from typing import Optional

import llnl.util.filesystem
@@ -58,6 +56,7 @@
import spack.util.file_permissions as fp
import spack.util.path
import spack.util.spack_yaml as syaml
from spack.context import Context


#: config section for this file
@@ -492,10 +491,6 @@ def excluded(self):
exclude_rules = conf.get("exclude", [])
exclude_matches = [x for x in exclude_rules if spec.satisfies(x)]

# Should I exclude the module because it's implicit?
exclude_implicits = conf.get("exclude_implicits", None)
excluded_as_implicit = exclude_implicits and not self.explicit

def debug_info(line_header, match_list):
if match_list:
msg = "\t{0} : {1}".format(line_header, spec.cshort_spec)
@@ -506,16 +501,28 @@ def debug_info(line_header, match_list):
debug_info("INCLUDE", include_matches)
debug_info("EXCLUDE", exclude_matches)

if excluded_as_implicit:
msg = "\tEXCLUDED_AS_IMPLICIT : {0}".format(spec.cshort_spec)
tty.debug(msg)

is_excluded = exclude_matches or excluded_as_implicit
if not include_matches and is_excluded:
if not include_matches and exclude_matches:
return True

return False

@property
def hidden(self):
"""Returns True if the module has been hidden, False otherwise."""

# A few variables for convenience of writing the method
spec = self.spec
conf = self.module.configuration(self.name)

hidden_as_implicit = not self.explicit and conf.get(
"hide_implicits", conf.get("exclude_implicits", False)
)

if hidden_as_implicit:
tty.debug(f"\tHIDDEN_AS_IMPLICIT : {spec.cshort_spec}")

return hidden_as_implicit

@property
def context(self):
return self.conf.get("context", {})
@@ -588,7 +595,7 @@ def use_name(self):
if not projection:
projection = self.conf.default_projections["all"]

name = self.spec.format(projection)
name = self.spec.format_path(projection)
# Not everybody is working on linux...
parts = name.split("/")
name = os.path.join(*parts)
@@ -719,10 +726,16 @@ def environment_modifications(self):
)

# Let the extendee/dependency modify their extensions/dependencies
# before asking for package-specific modifications
env.extend(spack.build_environment.modifications_from_dependencies(spec, context="run"))
# Package specific modifications
spack.build_environment.set_module_variables_for_package(spec.package)

# The only thing we care about is `setup_dependent_run_environment`, but
# for that to work, globals have to be set on the package modules, and the
# whole chain of setup_dependent_package has to be followed from leaf to spec.
# So: just run it here, but don't collect env mods.
spack.build_environment.SetupContext(context=Context.RUN).set_all_package_py_globals()

# Then run setup_dependent_run_environment before setup_run_environment.
for dep in spec.dependencies(deptype=("link", "run")):
dep.package.setup_dependent_run_environment(env, spec)
spec.package.setup_run_environment(env)

# Modifications required from modules.yaml
@@ -820,43 +833,6 @@ def verbose(self):
return self.conf.verbose


def ensure_modules_are_enabled_or_warn():
"""Ensures that, if a custom configuration file is found with custom configuration for the
default tcl module set, then tcl module file generation is enabled. Otherwise, a warning
is emitted.
"""

# TODO (v0.21 - Remove this function)
# Check if TCL module generation is enabled, return early if it is
enabled = spack.config.get("modules:default:enable", [])
if "tcl" in enabled:
return

# Check if we have custom TCL module sections
for scope in spack.config.CONFIG.file_scopes:
# Skip default configuration
if scope.name.startswith("default"):
continue

data = spack.config.get("modules:default:tcl", scope=scope.name)
if data:
config_file = pathlib.Path(scope.path)
if not scope.name.startswith("env"):
config_file = config_file / "modules.yaml"
break
else:
return

# If we are here we have a custom "modules" section in "config_file"
msg = (
f"detected custom TCL modules configuration in {config_file}, while TCL module file "
f"generation for the default module set is disabled. "
f"In Spack v0.20 module file generation has been disabled by default. To enable "
f"it run:\n\n\t$ spack config add 'modules:default:enable:[tcl]'\n"
)
warnings.warn(msg)


class BaseModuleFileWriter:
def __init__(self, spec, module_set_name, explicit=None):
self.spec = spec
@@ -881,6 +857,26 @@ def __init__(self, spec, module_set_name, explicit=None):
name = type(self).__name__
raise DefaultTemplateNotDefined(msg.format(name))

# Check if format for module hide command has been defined,
# throw if not found
try:
self.hide_cmd_format
except AttributeError:
msg = "'{0}' object has no attribute 'hide_cmd_format'\n"
msg += "Did you forget to define it in the class?"
name = type(self).__name__
raise HideCmdFormatNotDefined(msg.format(name))

# Check if modulerc header content has been defined,
# throw if not found
try:
self.modulerc_header
except AttributeError:
msg = "'{0}' object has no attribute 'modulerc_header'\n"
msg += "Did you forget to define it in the class?"
name = type(self).__name__
raise ModulercHeaderNotDefined(msg.format(name))

def _get_template(self):
"""Gets the template that will be rendered for this spec."""
# Get templates and put them in the order of importance:
@@ -975,6 +971,9 @@ def write(self, overwrite=False):
# Symlink defaults if needed
self.update_module_defaults()

# record module hiddenness if implicit
self.update_module_hiddenness()

def update_module_defaults(self):
if any(self.spec.satisfies(default) for default in self.conf.defaults):
# This spec matches a default, it needs to be symlinked to default
@@ -985,6 +984,60 @@ def update_module_defaults(self):
os.symlink(self.layout.filename, default_tmp)
os.rename(default_tmp, default_path)

def update_module_hiddenness(self, remove=False):
"""Update modulerc file corresponding to module to add or remove
command that hides module depending on its hidden state.

Args:
remove (bool): if True, hiddenness information for module is
removed from modulerc.
"""
modulerc_path = self.layout.modulerc
hide_module_cmd = self.hide_cmd_format % self.layout.use_name
hidden = self.conf.hidden and not remove
modulerc_exists = os.path.exists(modulerc_path)
updated = False

if modulerc_exists:
# retrieve modulerc content
with open(modulerc_path, "r") as f:
content = f.readlines()
content = "".join(content).split("\n")
# remove last empty item if any
if len(content[-1]) == 0:
del content[-1]
already_hidden = hide_module_cmd in content

# remove hide command if module not hidden
if already_hidden and not hidden:
content.remove(hide_module_cmd)
updated = True

# add hide command if module is hidden
elif not already_hidden and hidden:
if len(content) == 0:
content = self.modulerc_header.copy()
content.append(hide_module_cmd)
updated = True
else:
content = self.modulerc_header.copy()
if hidden:
content.append(hide_module_cmd)
updated = True

# no modulerc file change if no content update
if updated:
is_empty = content == self.modulerc_header or len(content) == 0
# remove existing modulerc if empty
if modulerc_exists and is_empty:
os.remove(modulerc_path)
# create or update modulerc
elif content != self.modulerc_header:
# ensure file ends with a newline character
content.append("")
with open(modulerc_path, "w") as f:
f.write("\n".join(content))

def remove(self):
"""Deletes the module file."""
mod_file = self.layout.filename
@@ -992,6 +1045,7 @@ def remove(self):
try:
os.remove(mod_file) # Remove the module file
self.remove_module_defaults() # Remove default targeting module file
self.update_module_hiddenness(remove=True) # Remove hide cmd in modulerc
os.removedirs(
os.path.dirname(mod_file)
) # Remove all the empty directories from the leaf up
@@ -1035,5 +1089,17 @@ class DefaultTemplateNotDefined(AttributeError, ModulesError):
"""


class HideCmdFormatNotDefined(AttributeError, ModulesError):
"""Raised if the attribute 'hide_cmd_format' has not been specified
in the derived classes.
"""


class ModulercHeaderNotDefined(AttributeError, ModulesError):
"""Raised if the attribute 'modulerc_header' has not been specified
in the derived classes.
"""


class ModulesTemplateNotFoundError(ModulesError, RuntimeError):
"""Raised if the template for a module file was not found."""

@@ -9,6 +9,7 @@
import posixpath
from typing import Any, Dict, List

import llnl.util.filesystem as fs
import llnl.util.lang as lang

import spack.compilers
@@ -231,6 +232,13 @@ def missing(self):
"""Returns the list of tokens that are not available."""
return [x for x in self.hierarchy_tokens if x not in self.available]

@property
def hidden(self):
# Never hide a module that opens a hierarchy
if any(self.spec.package.provides(x) for x in self.hierarchy_tokens):
return False
return super().hidden


class LmodFileLayout(BaseFileLayout):
"""File layout for lmod module files."""
@@ -273,6 +281,13 @@ def filename(self):
)
return fullname

@property
def modulerc(self):
"""Returns the modulerc file associated with current module file"""
return os.path.join(
os.path.dirname(self.filename), ".".join([".modulerc", self.extension])
)

def token_to_path(self, name, value):
"""Transforms a hierarchy token into the corresponding path part.

@@ -283,8 +298,10 @@ def token_to_path(self, name, value):
Returns:
str: part of the path associated with the service
"""

# General format for the path part
path_part_fmt = os.path.join("{token.name}", "{token.version}")
def path_part_fmt(token):
return fs.polite_path([f"{token.name}", f"{token.version}"])

# If we are dealing with a core compiler, return 'Core'
core_compilers = self.conf.core_compilers
@@ -296,13 +313,13 @@ def token_to_path(self, name, value):
# CompilerSpec does not have a hash, as we are not allowed to
# use different flavors of the same compiler
if name == "compiler":
return path_part_fmt.format(token=value)
return path_part_fmt(token=value)

# In case the hierarchy token refers to a virtual provider
# we need to append a hash to the version to distinguish
# among flavors of the same library (e.g. openblas~openmp vs.
# openblas+openmp)
path = path_part_fmt.format(token=value)
path = path_part_fmt(token=value)
path = "-".join([path, value.dag_hash(length=7)])
return path
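Aside: the net effect of the new path_part_fmt helper: a compiler token maps to name/version, while a virtual provider additionally gets a 7-character DAG-hash suffix so that flavors of the same library land in distinct hierarchy directories. A rough sketch with an invented token (os.path.join stands in for fs.polite_path):

    import os

    def path_part_fmt(token):
        return os.path.join(f"{token.name}", f"{token.version}")

    class FakeToken:  # hypothetical stand-in for a spec-like token
        name, version = "openblas", "0.3.24"

    path = path_part_fmt(FakeToken)
    path = "-".join([path, "abcdefg"])  # 7-char dag hash appended for virtual providers
    print(path)  # openblas/0.3.24-abcdefg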

@@ -467,6 +484,10 @@ class LmodModulefileWriter(BaseModuleFileWriter):

default_template = posixpath.join("modules", "modulefile.lua")

modulerc_header: list = []

hide_cmd_format = 'hide_version("%s")'


class CoreCompilersNotFoundError(spack.error.SpackError, KeyError):
"""Error raised if the key 'core_compilers' has not been specified

@@ -6,6 +6,7 @@
"""This module implements the classes necessary to generate Tcl
non-hierarchical modules.
"""
import os.path
import posixpath
from typing import Any, Dict

@@ -56,6 +57,11 @@ class TclConfiguration(BaseConfiguration):
class TclFileLayout(BaseFileLayout):
"""File layout for tcl module files."""

@property
def modulerc(self):
"""Returns the modulerc file associated with current module file"""
return os.path.join(os.path.dirname(self.filename), ".modulerc")


class TclContext(BaseContext):
"""Context class for tcl module files."""
@@ -73,3 +79,7 @@ class TclModulefileWriter(BaseModuleFileWriter):
# os.path.join due to spack.spec.Spec.format
# requiring forward slash path separators at this stage
default_template = posixpath.join("modules", "modulefile.tcl")

modulerc_header = ["#%Module4.7"]

hide_cmd_format = "module-hide --soft --hidden-loaded %s"

lib/spack/spack/oci/__init__.py (new file, 4 lines)
@@ -0,0 +1,4 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

lib/spack/spack/oci/image.py (new file, 235 lines)
@@ -0,0 +1,235 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import re
import urllib.parse
from typing import Optional, Union

import spack.spec

# notice: Docker is more strict (no uppercase allowed). We parse image names *with* uppercase
# and normalize, so: example.com/Organization/Name -> example.com/organization/name. Tags are
# case sensitive though.
alphanumeric_with_uppercase = r"[a-zA-Z0-9]+"
separator = r"(?:[._]|__|[-]+)"
localhost = r"localhost"
domainNameComponent = r"(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*[a-zA-Z0-9])"
optionalPort = r"(?::[0-9]+)?"
tag = r"[\w][\w.-]{0,127}"
digestPat = r"[A-Za-z][A-Za-z0-9]*(?:[-_+.][A-Za-z][A-Za-z0-9]*)*[:][0-9a-fA-F]{32,}"
ipv6address = r"\[(?:[a-fA-F0-9:]+)\]"

# domain name
domainName = rf"{domainNameComponent}(?:\.{domainNameComponent})*"
host = rf"(?:{domainName}|{ipv6address})"
domainAndPort = rf"{host}{optionalPort}"

# image name
pathComponent = rf"{alphanumeric_with_uppercase}(?:{separator}{alphanumeric_with_uppercase})*"
remoteName = rf"{pathComponent}(?:\/{pathComponent})*"
namePat = rf"(?:{domainAndPort}\/)?{remoteName}"

# Regex for a full image reference, with 3 groups: name, tag, digest
referencePat = re.compile(rf"^({namePat})(?::({tag}))?(?:@({digestPat}))?$")

# Regex for splitting the name into domain and path components
anchoredNameRegexp = re.compile(rf"^(?:({domainAndPort})\/)?({remoteName})$")


def ensure_sha256_checksum(oci_blob: str):
"""Validate that the reference is of the format sha256:<checksum>
Return the checksum if valid, raise ValueError otherwise."""
if ":" not in oci_blob:
raise ValueError(f"Invalid OCI blob format: {oci_blob}")
alg, checksum = oci_blob.split(":", 1)
if alg != "sha256":
raise ValueError(f"Unsupported OCI blob checksum algorithm: {alg}")
if len(checksum) != 64:
raise ValueError(f"Invalid OCI blob checksum length: {len(checksum)}")
return checksum


class Digest:
"""Represents a digest in the format <algorithm>:<digest>.
Currently only supports sha256 digests."""

__slots__ = ["algorithm", "digest"]

def __init__(self, *, algorithm: str, digest: str) -> None:
self.algorithm = algorithm
self.digest = digest

def __eq__(self, __value: object) -> bool:
if not isinstance(__value, Digest):
return NotImplemented
return self.algorithm == __value.algorithm and self.digest == __value.digest

@classmethod
def from_string(cls, string: str) -> "Digest":
return cls(algorithm="sha256", digest=ensure_sha256_checksum(string))

@classmethod
def from_sha256(cls, digest: str) -> "Digest":
return cls(algorithm="sha256", digest=digest)

def __str__(self) -> str:
return f"{self.algorithm}:{self.digest}"


class ImageReference:
"""A parsed image of the form domain/name:tag[@digest].
The digest is optional, and domain and tag are automatically
filled out with defaults when parsed from string."""

__slots__ = ["domain", "name", "tag", "digest"]

def __init__(
self, *, domain: str, name: str, tag: str = "latest", digest: Optional[Digest] = None
):
self.domain = domain
self.name = name
self.tag = tag
self.digest = digest

@classmethod
def from_string(cls, string) -> "ImageReference":
match = referencePat.match(string)
if not match:
raise ValueError(f"Invalid image reference: {string}")

image, tag, digest = match.groups()

assert isinstance(image, str)
assert isinstance(tag, (str, type(None)))
assert isinstance(digest, (str, type(None)))

match = anchoredNameRegexp.match(image)

# This can never happen, since this regex is implied
# by the one above. It's just here to make mypy happy.
assert match, f"Invalid image reference: {string}"

domain, name = match.groups()

assert isinstance(domain, (str, type(None)))
assert isinstance(name, str)

# Fill out defaults like docker would do...
# Based on github.com/distribution/distribution: allow short names like "ubuntu"
# and "user/repo" to be interpreted as "library/ubuntu" and "user/repo:latest".
# Not sure if Spack should follow Docker, but it's what people expect...
if not domain:
domain = "index.docker.io"
name = f"library/{name}"
elif (
"." not in domain
and ":" not in domain
and domain != "localhost"
and domain == domain.lower()
):
name = f"{domain}/{name}"
domain = "index.docker.io"

# Lowercase the image name. This is enforced by Docker, although the OCI spec isn't clear?
# We do this anyway, because for example in GitHub Actions the <organization>/<repository>
# part can have uppercase, and may be interpolated when specifying the relevant OCI image.
name = name.lower()

if not tag:
tag = "latest"

# sha256 is currently the only algorithm that
# we implement, even though the spec allows for more
if isinstance(digest, str):
digest = Digest.from_string(digest)

return cls(domain=domain, name=name, tag=tag, digest=digest)
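Aside: the Docker-style defaulting above means short names normalize the way users expect. A sketch with invented references:

    ref = ImageReference.from_string("ubuntu")
    assert str(ref) == "index.docker.io/library/ubuntu:latest"

    ref = ImageReference.from_string("ghcr.io/MyOrg/spack-buildcache:develop")
    assert (ref.domain, ref.name, ref.tag) == ("ghcr.io", "myorg/spack-buildcache", "develop")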

def manifest_url(self) -> str:
digest_or_tag = self.digest or self.tag
return f"https://{self.domain}/v2/{self.name}/manifests/{digest_or_tag}"

def blob_url(self, digest: Union[str, Digest]) -> str:
if isinstance(digest, str):
digest = Digest.from_string(digest)
return f"https://{self.domain}/v2/{self.name}/blobs/{digest}"

def with_digest(self, digest: Union[str, Digest]) -> "ImageReference":
if isinstance(digest, str):
digest = Digest.from_string(digest)
return ImageReference(domain=self.domain, name=self.name, tag=self.tag, digest=digest)

def with_tag(self, tag: str) -> "ImageReference":
return ImageReference(domain=self.domain, name=self.name, tag=tag, digest=self.digest)

def uploads_url(self, digest: Optional[Digest] = None) -> str:
url = f"https://{self.domain}/v2/{self.name}/blobs/uploads/"
if digest:
url += f"?digest={digest}"
return url

def tags_url(self) -> str:
return f"https://{self.domain}/v2/{self.name}/tags/list"

def endpoint(self, path: str = "") -> str:
return urllib.parse.urljoin(f"https://{self.domain}/v2/", path)

def __str__(self) -> str:
s = f"{self.domain}/{self.name}"
if self.tag:
s += f":{self.tag}"
if self.digest:
s += f"@{self.digest}"
return s

def __eq__(self, __value: object) -> bool:
if not isinstance(__value, ImageReference):
return NotImplemented
return (
self.domain == __value.domain
and self.name == __value.name
and self.tag == __value.tag
and self.digest == __value.digest
)


def _ensure_valid_tag(tag: str) -> str:
"""Ensure a tag is valid for an OCI registry."""
sanitized = re.sub(r"[^\w.-]", "_", tag)
if len(sanitized) > 128:
return sanitized[:64] + sanitized[-64:]
return sanitized


def default_tag(spec: "spack.spec.Spec") -> str:
"""Return a valid, default image tag for a spec."""
return _ensure_valid_tag(f"{spec.name}-{spec.version}-{spec.dag_hash()}.spack")
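Aside: _ensure_valid_tag replaces anything outside [\w.-] with an underscore and truncates overlong tags, so default_tag always yields a registry-safe tag. For example (invented input):

    print(_ensure_valid_tag("zlib-1.2.13+shared"))  # zlib-1.2.13_shared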


#: Default OCI index tag
default_index_tag = "index.spack"


def tag_is_spec(tag: str) -> bool:
"""Check if a tag is likely a Spec"""
return tag.endswith(".spack") and tag != default_index_tag


def default_config(architecture: str, os: str):
return {
"architecture": architecture,
"os": os,
"rootfs": {"type": "layers", "diff_ids": []},
"config": {"Env": []},
}


def default_manifest():
return {
"mediaType": "application/vnd.oci.image.manifest.v1+json",
"schemaVersion": 2,
"config": {"mediaType": "application/vnd.oci.image.config.v1+json"},
"layers": [],
}

lib/spack/spack/oci/oci.py (new file, 381 lines)
@@ -0,0 +1,381 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import hashlib
import json
import os
import time
import urllib.error
import urllib.parse
import urllib.request
from http.client import HTTPResponse
from typing import NamedTuple, Tuple
from urllib.request import Request

import llnl.util.tty as tty

import spack.binary_distribution
import spack.config
import spack.error
import spack.fetch_strategy
import spack.mirror
import spack.oci.opener
import spack.repo
import spack.spec
import spack.stage
import spack.traverse
import spack.util.crypto

from .image import Digest, ImageReference


class Blob(NamedTuple):
compressed_digest: Digest
uncompressed_digest: Digest
size: int


def create_tarball(spec: spack.spec.Spec, tarfile_path):
buildinfo = spack.binary_distribution.get_buildinfo_dict(spec)
return spack.binary_distribution._do_create_tarball(tarfile_path, spec.prefix, buildinfo)


def _log_upload_progress(digest: Digest, size: int, elapsed: float):
elapsed = max(elapsed, 0.001) # guard against division by zero
tty.info(f"Uploaded {digest} ({elapsed:.2f}s, {size / elapsed / 1024 / 1024:.2f} MB/s)")


def with_query_param(url: str, param: str, value: str) -> str:
"""Add a query parameter to a URL

Args:
url: The URL to add the parameter to.
param: The parameter name.
value: The parameter value.

Returns:
The URL with the parameter added.
"""
parsed = urllib.parse.urlparse(url)
query = urllib.parse.parse_qs(parsed.query)
if param in query:
query[param].append(value)
else:
query[param] = [value]
return urllib.parse.urlunparse(
parsed._replace(query=urllib.parse.urlencode(query, doseq=True))
)
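Aside: usage sketch for with_query_param (URL invented); note the value is percent-encoded:

    url = with_query_param("https://example.com/v2/upload?state=abc", "digest", "sha256:123")
    print(url)  # https://example.com/v2/upload?state=abc&digest=sha256%3A123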


def upload_blob(
ref: ImageReference,
file: str,
digest: Digest,
force: bool = False,
small_file_size: int = 0,
_urlopen: spack.oci.opener.MaybeOpen = None,
) -> bool:
"""Uploads a blob to an OCI registry

We only do monolithic uploads, even though it's very simple to do chunked.
Observed problems with chunked uploads:
(1) it's slow, many sequential requests, (2) some registries set an *unknown*
max chunk size, and the spec doesn't say how to obtain it

Args:
ref: The image reference.
file: The file to upload.
digest: The digest of the file.
force: Whether to force upload the blob, even if it already exists.
small_file_size: For files at most this size, attempt
to do a single POST request instead of POST + PUT.
Some registries do not support single requests, and others
do not specify what size they support in single POST.
For now this feature is disabled by default (0KB)

Returns:
True if the blob was uploaded, False if it already existed.
"""
_urlopen = _urlopen or spack.oci.opener.urlopen

# Test if the blob already exists, if so, early exit.
if not force and blob_exists(ref, digest, _urlopen):
return False

start = time.time()

with open(file, "rb") as f:
file_size = os.fstat(f.fileno()).st_size

# For small blobs, do a single POST request.
# The spec says that registries MAY support this
if file_size <= small_file_size:
request = Request(
url=ref.uploads_url(digest),
method="POST",
data=f,
headers={
"Content-Type": "application/octet-stream",
"Content-Length": str(file_size),
},
)
else:
request = Request(
url=ref.uploads_url(), method="POST", headers={"Content-Length": "0"}
)

response = _urlopen(request)

# Created the blob in one go.
if response.status == 201:
_log_upload_progress(digest, file_size, time.time() - start)
return True

# Otherwise, do another PUT request.
spack.oci.opener.ensure_status(response, 202)
assert "Location" in response.headers

# Can be absolute or relative, joining handles both
upload_url = with_query_param(
ref.endpoint(response.headers["Location"]), "digest", str(digest)
)
f.seek(0)

response = _urlopen(
Request(
url=upload_url,
method="PUT",
data=f,
headers={
"Content-Type": "application/octet-stream",
"Content-Length": str(file_size),
},
)
)

spack.oci.opener.ensure_status(response, 201)

# print elapsed time and # MB/s
_log_upload_progress(digest, file_size, time.time() - start)
return True


def upload_manifest(
ref: ImageReference,
oci_manifest: dict,
tag: bool = True,
_urlopen: spack.oci.opener.MaybeOpen = None,
):
"""Uploads a manifest/index to a registry

Args:
ref: The image reference.
oci_manifest: The OCI manifest or index.
tag: When true, use the tag, otherwise use the digest,
this is relevant for multi-arch images, where the
tag is an index, referencing the manifests by digest.

Returns:
The digest and size of the uploaded manifest.
"""
_urlopen = _urlopen or spack.oci.opener.urlopen

data = json.dumps(oci_manifest, separators=(",", ":")).encode()
digest = Digest.from_sha256(hashlib.sha256(data).hexdigest())
size = len(data)

if not tag:
ref = ref.with_digest(digest)

response = _urlopen(
Request(
url=ref.manifest_url(),
method="PUT",
data=data,
headers={"Content-Type": oci_manifest["mediaType"]},
)
)

spack.oci.opener.ensure_status(response, 201)
return digest, size


def image_from_mirror(mirror: spack.mirror.Mirror) -> ImageReference:
"""Given an OCI based mirror, extract the URL and image name from it"""
url = mirror.push_url
if not url.startswith("oci://"):
raise ValueError(f"Mirror {mirror} is not an OCI mirror")
return ImageReference.from_string(url[6:])
|
||||
|
||||
|
||||
def blob_exists(
|
||||
ref: ImageReference, digest: Digest, _urlopen: spack.oci.opener.MaybeOpen = None
|
||||
) -> bool:
|
||||
"""Checks if a blob exists in an OCI registry"""
|
||||
try:
|
||||
_urlopen = _urlopen or spack.oci.opener.urlopen
|
||||
response = _urlopen(Request(url=ref.blob_url(digest), method="HEAD"))
|
||||
return response.status == 200
|
||||
except urllib.error.HTTPError as e:
|
||||
if e.getcode() == 404:
|
||||
return False
|
||||
raise
|
||||
|
||||
|
||||
def copy_missing_layers(
    src: ImageReference,
    dst: ImageReference,
    architecture: str,
    _urlopen: spack.oci.opener.MaybeOpen = None,
) -> Tuple[dict, dict]:
    """Copy image layers from src to dst for the given architecture.

    Args:
        src: The source image reference.
        dst: The destination image reference.
        architecture: The architecture (when referencing an index)

    Returns:
        Tuple of manifest and config of the base image.
    """
    _urlopen = _urlopen or spack.oci.opener.urlopen
    manifest, config = get_manifest_and_config(src, architecture, _urlopen=_urlopen)

    # Get layer digests
    digests = [Digest.from_string(layer["digest"]) for layer in manifest["layers"]]

    # Filter out digests that don't exist in the registry
    missing_digests = [
        digest for digest in digests if not blob_exists(dst, digest, _urlopen=_urlopen)
    ]

    if not missing_digests:
        return manifest, config

    # Pull missing blobs, push them to the registry
    with spack.stage.StageComposite.from_iterable(
        make_stage(url=src.blob_url(digest), digest=digest, _urlopen=_urlopen)
        for digest in missing_digests
    ) as stages:
        stages.fetch()
        stages.check()
        stages.cache_local()

        for stage, digest in zip(stages, missing_digests):
            # No need to check existence again, force=True.
            upload_blob(
                dst, file=stage.save_filename, force=True, digest=digest, _urlopen=_urlopen
            )

    return manifest, config


#: OCI manifest content types (including docker type)
manifest_content_type = [
    "application/vnd.oci.image.manifest.v1+json",
    "application/vnd.docker.distribution.manifest.v2+json",
]

#: OCI index content types (including docker type)
index_content_type = [
    "application/vnd.oci.image.index.v1+json",
    "application/vnd.docker.distribution.manifest.list.v2+json",
]

#: All OCI manifest / index content types
all_content_type = manifest_content_type + index_content_type


def get_manifest_and_config(
    ref: ImageReference,
    architecture="amd64",
    recurse=3,
    _urlopen: spack.oci.opener.MaybeOpen = None,
) -> Tuple[dict, dict]:
    """Recursively fetch manifest and config for a given image reference
    with a given architecture.

    Args:
        ref: The image reference.
        architecture: The architecture (when referencing an index)
        recurse: How many levels of index to recurse into.

    Returns:
        A tuple of (manifest, config)"""

    _urlopen = _urlopen or spack.oci.opener.urlopen

    # Get manifest
    response: HTTPResponse = _urlopen(
        Request(url=ref.manifest_url(), headers={"Accept": ", ".join(all_content_type)})
    )

    # Recurse when we find an index
    if response.headers["Content-Type"] in index_content_type:
        if recurse == 0:
            raise Exception("Maximum recursion depth reached while fetching OCI manifest")

        index = json.load(response)
        manifest_meta = next(
            manifest
            for manifest in index["manifests"]
            if manifest["platform"]["architecture"] == architecture
        )

        return get_manifest_and_config(
            ref.with_digest(manifest_meta["digest"]),
            architecture=architecture,
            recurse=recurse - 1,
            _urlopen=_urlopen,
        )

    # Otherwise, require a manifest
    if response.headers["Content-Type"] not in manifest_content_type:
        raise Exception(f"Unknown content type {response.headers['Content-Type']}")

    manifest = json.load(response)

    # Download, verify and cache config file
    config_digest = Digest.from_string(manifest["config"]["digest"])
    with make_stage(ref.blob_url(config_digest), config_digest, _urlopen=_urlopen) as stage:
        stage.fetch()
        stage.check()
        stage.cache_local()
        with open(stage.save_filename, "rb") as f:
            config = json.load(f)

    return manifest, config


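# To make the recursion concrete: for a multi-arch tag the first response is an
# index whose "manifests" entries carry a "platform" field; the entry matching
# the requested architecture is then re-fetched by digest as a plain manifest.
# Hedged usage sketch, not part of this diff (registry and image are made up):
ref = ImageReference.from_string("ghcr.io/example/image:latest")
manifest, config = get_manifest_and_config(ref, architecture="arm64")
assert manifest["mediaType"] in manifest_content_type

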
#: Same as upload_manifest, but with retry wrapper
upload_manifest_with_retry = spack.oci.opener.default_retry(upload_manifest)

#: Same as upload_blob, but with retry wrapper
upload_blob_with_retry = spack.oci.opener.default_retry(upload_blob)

#: Same as get_manifest_and_config, but with retry wrapper
get_manifest_and_config_with_retry = spack.oci.opener.default_retry(get_manifest_and_config)

#: Same as copy_missing_layers, but with retry wrapper
copy_missing_layers_with_retry = spack.oci.opener.default_retry(copy_missing_layers)


def make_stage(
    url: str, digest: Digest, keep: bool = False, _urlopen: spack.oci.opener.MaybeOpen = None
) -> spack.stage.Stage:
    _urlopen = _urlopen or spack.oci.opener.urlopen
    fetch_strategy = spack.fetch_strategy.OCIRegistryFetchStrategy(
        url, checksum=digest.digest, _urlopen=_urlopen
    )
    # Use blobs/<alg>/<encoded> as the cache path, which follows
    # the OCI Image Layout Specification. What's missing though,
    # is the `oci-layout` and `index.json` files, which are
    # required by the spec.
    return spack.stage.Stage(
        fetch_strategy,
        mirror_paths=spack.mirror.OCIImageLayout(digest),
        name=digest.digest,
        keep=keep,
    )

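# The blobs/<alg>/<encoded> path named in the comment above maps directly from
# the digest string; a standalone sketch of that mapping (not code from this diff):
def layout_blob_path(digest: str) -> str:
    # "sha256:abcd..." -> "blobs/sha256/abcd..." per the OCI Image Layout spec
    algorithm, _, encoded = digest.partition(":")
    return f"blobs/{algorithm}/{encoded}"

assert layout_blob_path("sha256:deadbeef") == "blobs/sha256/deadbeef"
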
442 lib/spack/spack/oci/opener.py Normal file
@@ -0,0 +1,442 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""All the logic for OCI fetching and authentication"""

import base64
import json
import re
import time
import urllib.error
import urllib.parse
import urllib.request
from enum import Enum, auto
from http.client import HTTPResponse
from typing import Callable, Dict, Iterable, List, NamedTuple, Optional, Tuple
from urllib.request import Request

import llnl.util.lang

import spack.config
import spack.mirror
import spack.parser
import spack.repo
import spack.util.web

from .image import ImageReference


def _urlopen():
    opener = create_opener()

    def dispatch_open(fullurl, data=None, timeout=None):
        timeout = timeout or spack.config.get("config:connect_timeout", 10)
        return opener.open(fullurl, data, timeout)

    return dispatch_open


OpenType = Callable[..., HTTPResponse]
MaybeOpen = Optional[OpenType]

#: Opener that automatically uses OCI authentication based on mirror config
urlopen: OpenType = llnl.util.lang.Singleton(_urlopen)


SP = r" "
OWS = r"[ \t]*"
BWS = OWS
HTAB = r"\t"
VCHAR = r"\x21-\x7E"
tchar = r"[!#$%&'*+\-.^_`|~0-9A-Za-z]"
token = rf"{tchar}+"
obs_text = r"\x80-\xFF"
qdtext = rf"[{HTAB}{SP}\x21\x23-\x5B\x5D-\x7E{obs_text}]"
quoted_pair = rf"\\([{HTAB}{SP}{VCHAR}{obs_text}])"
quoted_string = rf'"(?:({qdtext}*)|{quoted_pair})*"'


class TokenType(spack.parser.TokenBase):
    AUTH_PARAM = rf"({token}){BWS}={BWS}({token}|{quoted_string})"
    # TOKEN68 = r"([A-Za-z0-9\-._~+/]+=*)"  # todo... support this?
    TOKEN = rf"{tchar}+"
    EQUALS = rf"{BWS}={BWS}"
    COMMA = rf"{OWS},{OWS}"
    SPACE = r" +"
    EOF = r"$"
    ANY = r"."


TOKEN_REGEXES = [rf"(?P<{token}>{token.regex})" for token in TokenType]

ALL_TOKENS = re.compile("|".join(TOKEN_REGEXES))


class State(Enum):
    CHALLENGE = auto()
    AUTH_PARAM_LIST_START = auto()
    AUTH_PARAM = auto()
    NEXT_IN_LIST = auto()
    AUTH_PARAM_OR_SCHEME = auto()


def tokenize(input: str):
    scanner = ALL_TOKENS.scanner(input)  # type: ignore[attr-defined]

    for match in iter(scanner.match, None):  # type: ignore[var-annotated]
        yield spack.parser.Token(
            TokenType.__members__[match.lastgroup],  # type: ignore[attr-defined]
            match.group(),  # type: ignore[attr-defined]
            match.start(),  # type: ignore[attr-defined]
            match.end(),  # type: ignore[attr-defined]
        )


class Challenge:
    __slots__ = ["scheme", "params"]

    def __init__(
        self, scheme: Optional[str] = None, params: Optional[List[Tuple[str, str]]] = None
    ) -> None:
        self.scheme = scheme or ""
        self.params = params or []

    def __repr__(self) -> str:
        return f"Challenge({self.scheme}, {self.params})"

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, Challenge)
            and self.scheme == other.scheme
            and self.params == other.params
        )


def parse_www_authenticate(input: str):
    """Very basic parsing of WWW-Authenticate headers (RFC 7235 section 4.1).
    Notice: this omits token68 support."""

    # auth-scheme = token
    # auth-param = token BWS "=" BWS ( token / quoted-string )
    # challenge = auth-scheme [ 1*SP ( token68 / #auth-param ) ]
    # WWW-Authenticate = 1#challenge

    challenges: List[Challenge] = []

    _unquote = re.compile(quoted_pair).sub
    unquote = lambda s: _unquote(r"\1", s[1:-1])

    mode: State = State.CHALLENGE
    tokens = tokenize(input)

    current_challenge = Challenge()

    def extract_auth_param(input: str) -> Tuple[str, str]:
        key, value = input.split("=", 1)
        key = key.rstrip()
        value = value.lstrip()
        if value.startswith('"'):
            value = unquote(value)
        return key, value

    while True:
        token: spack.parser.Token = next(tokens)

        if mode == State.CHALLENGE:
            if token.kind == TokenType.EOF:
                raise ValueError(token)
            elif token.kind == TokenType.TOKEN:
                current_challenge.scheme = token.value
                mode = State.AUTH_PARAM_LIST_START
            else:
                raise ValueError(token)

        elif mode == State.AUTH_PARAM_LIST_START:
            if token.kind == TokenType.EOF:
                challenges.append(current_challenge)
                break
            elif token.kind == TokenType.COMMA:
                # Challenge without param list, followed by another challenge.
                challenges.append(current_challenge)
                current_challenge = Challenge()
                mode = State.CHALLENGE
            elif token.kind == TokenType.SPACE:
                # A space means it must be followed by a param list.
                mode = State.AUTH_PARAM
            else:
                raise ValueError(token)

        elif mode == State.AUTH_PARAM:
            if token.kind == TokenType.EOF:
                raise ValueError(token)
            elif token.kind == TokenType.AUTH_PARAM:
                key, value = extract_auth_param(token.value)
                current_challenge.params.append((key, value))
                mode = State.NEXT_IN_LIST
            else:
                raise ValueError(token)

        elif mode == State.NEXT_IN_LIST:
            if token.kind == TokenType.EOF:
                challenges.append(current_challenge)
                break
            elif token.kind == TokenType.COMMA:
                mode = State.AUTH_PARAM_OR_SCHEME
            else:
                raise ValueError(token)

        elif mode == State.AUTH_PARAM_OR_SCHEME:
            if token.kind == TokenType.EOF:
                raise ValueError(token)
            elif token.kind == TokenType.TOKEN:
                challenges.append(current_challenge)
                current_challenge = Challenge(token.value)
                mode = State.AUTH_PARAM_LIST_START
            elif token.kind == TokenType.AUTH_PARAM:
                key, value = extract_auth_param(token.value)
                current_challenge.params.append((key, value))
                mode = State.NEXT_IN_LIST

    return challenges


class RealmServiceScope(NamedTuple):
    realm: str
    service: str
    scope: str


class UsernamePassword(NamedTuple):
    username: str
    password: str


def get_bearer_challenge(challenges: List[Challenge]) -> Optional[RealmServiceScope]:
    # Find a challenge that we can handle (currently only Bearer)
    challenge = next((c for c in challenges if c.scheme == "Bearer"), None)

    if challenge is None:
        return None

    # Get realm / service / scope from challenge
    realm = next((v for k, v in challenge.params if k == "realm"), None)
    service = next((v for k, v in challenge.params if k == "service"), None)
    scope = next((v for k, v in challenge.params if k == "scope"), None)

    if realm is None or service is None or scope is None:
        return None

    return RealmServiceScope(realm, service, scope)


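# Putting the parser and the challenge extraction together, a typical registry
# response header round-trips like this (the header value below is illustrative,
# not taken from this diff):
example_header = (
    'Bearer realm="https://auth.example.com/token",'
    'service="registry.example.com",scope="repository:foo/bar:pull"'
)
challenge = get_bearer_challenge(parse_www_authenticate(example_header))
# -> RealmServiceScope(realm='https://auth.example.com/token',
#                      service='registry.example.com', scope='repository:foo/bar:pull')

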
class OCIAuthHandler(urllib.request.BaseHandler):
    def __init__(self, credentials_provider: Callable[[str], Optional[UsernamePassword]]):
        """
        Args:
            credentials_provider: A function that takes a domain and may return a UsernamePassword.
        """
        self.credentials_provider = credentials_provider

        # Cached bearer tokens for a given domain.
        self.cached_tokens: Dict[str, str] = {}

    def obtain_bearer_token(self, registry: str, challenge: RealmServiceScope, timeout) -> str:
        # See https://docs.docker.com/registry/spec/auth/token/

        query = urllib.parse.urlencode(
            {"service": challenge.service, "scope": challenge.scope, "client_id": "spack"}
        )

        parsed = urllib.parse.urlparse(challenge.realm)._replace(
            query=query, fragment="", params=""
        )

        # Don't send credentials over insecure transport.
        if parsed.scheme != "https":
            raise ValueError(
                f"Cannot login to {registry} over insecure {parsed.scheme} connection"
            )

        request = Request(urllib.parse.urlunparse(parsed))

        # I guess we shouldn't cache this, since we don't know
        # the context in which it's used (may depend on config)
        pair = self.credentials_provider(registry)

        if pair is not None:
            encoded = base64.b64encode(f"{pair.username}:{pair.password}".encode("utf-8")).decode(
                "utf-8"
            )
            request.add_unredirected_header("Authorization", f"Basic {encoded}")

        # Do a GET request.
        response = self.parent.open(request, timeout=timeout)

        # Read the response and parse the JSON
        response_json = json.load(response)

        # Get the token from the response
        token = response_json["token"]

        # Remember the last obtained token for this registry
        # Note: we should probably take into account realm, service and scope
        # so we can store multiple tokens for the same registry.
        self.cached_tokens[registry] = token

        return token

    def https_request(self, req: Request):
        # Eagerly add the bearer token to the request if no
        # auth header is set yet, to avoid 401s in multiple
        # requests to the same registry.

        # Use has_header, not .headers, since there are two
        # types of headers (redirected and unredirected)
        if req.has_header("Authorization"):
            return req

        parsed = urllib.parse.urlparse(req.full_url)
        token = self.cached_tokens.get(parsed.netloc)

        if not token:
            return req

        req.add_unredirected_header("Authorization", f"Bearer {token}")
        return req

    def http_error_401(self, req: Request, fp, code, msg, headers):
        # Login failed; avoid infinite recursion where we go back and
        # forth between auth server and registry.
        if hasattr(req, "login_attempted"):
            raise urllib.error.HTTPError(
                req.full_url, code, f"Failed to login to {req.full_url}: {msg}", headers, fp
            )

        # On 401 Unauthorized, parse the WWW-Authenticate header
        # to determine what authentication is required.
        if "WWW-Authenticate" not in headers:
            raise urllib.error.HTTPError(
                req.full_url,
                code,
                "Cannot login to registry, missing WWW-Authenticate header",
                headers,
                fp,
            )

        header_value = headers["WWW-Authenticate"]

        try:
            challenge = get_bearer_challenge(parse_www_authenticate(header_value))
        except ValueError as e:
            raise urllib.error.HTTPError(
                req.full_url,
                code,
                f"Cannot login to registry, malformed WWW-Authenticate header: {header_value}",
                headers,
                fp,
            ) from e

        # If there is no bearer challenge, we can't handle it
        if not challenge:
            raise urllib.error.HTTPError(
                req.full_url,
                code,
                f"Cannot login to registry, unsupported authentication scheme: {header_value}",
                headers,
                fp,
            )

        # Get the token from the auth handler
        try:
            token = self.obtain_bearer_token(
                registry=urllib.parse.urlparse(req.get_full_url()).netloc,
                challenge=challenge,
                timeout=req.timeout,
            )
        except ValueError as e:
            raise urllib.error.HTTPError(
                req.full_url,
                code,
                f"Cannot login to registry, failed to obtain bearer token: {e}",
                headers,
                fp,
            ) from e

        # Add the token to the request
        req.add_unredirected_header("Authorization", f"Bearer {token}")
        setattr(req, "login_attempted", True)

        return self.parent.open(req, timeout=req.timeout)


def credentials_from_mirrors(
    domain: str, *, mirrors: Optional[Iterable[spack.mirror.Mirror]] = None
) -> Optional[UsernamePassword]:
    """Filter out OCI registry credentials from a list of mirrors."""

    mirrors = mirrors or spack.mirror.MirrorCollection().values()

    for mirror in mirrors:
        # Prefer push credentials over fetch. Unlikely that those are different,
        # but our config format allows it.
        for direction in ("push", "fetch"):
            pair = mirror.get_access_pair(direction)
            if pair is None:
                continue
            url = mirror.get_url(direction)
            if not url.startswith("oci://"):
                continue
            try:
                parsed = ImageReference.from_string(url[6:])
            except ValueError:
                continue
            if parsed.domain == domain:
                return UsernamePassword(*pair)
    return None


def create_opener():
    """Create an opener that can handle OCI authentication."""
    opener = urllib.request.OpenerDirector()
    for handler in [
        urllib.request.UnknownHandler(),
        urllib.request.HTTPSHandler(),
        spack.util.web.SpackHTTPDefaultErrorHandler(),
        urllib.request.HTTPRedirectHandler(),
        urllib.request.HTTPErrorProcessor(),
        OCIAuthHandler(credentials_from_mirrors),
    ]:
        opener.add_handler(handler)
    return opener


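# Callers normally go through the module-level urlopen singleton defined above,
# which lazily calls create_opener(); a 401 from a registry is then transparently
# turned into a token handshake plus a retried request. Hedged sketch (the URL
# is illustrative, not a real registry):
example_response = urlopen(Request("https://registry.example.com/v2/"))

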
def ensure_status(response: HTTPResponse, status: int):
    """Raise an error if the response status is not the expected one."""
    if response.status == status:
        return

    raise urllib.error.HTTPError(
        response.geturl(), response.status, response.reason, response.info(), None
    )


def default_retry(f, retries: int = 3, sleep=None):
    sleep = sleep or time.sleep

    def wrapper(*args, **kwargs):
        for i in range(retries):
            try:
                return f(*args, **kwargs)
            except urllib.error.HTTPError as e:
                # Retry on internal server errors and rate limit errors.
                # Potentially this could take into account the Retry-After header
                # if registries support it.
                if i + 1 != retries and (500 <= e.code < 600 or e.code == 429):
                    # Exponential backoff
                    sleep(2**i)
                    continue
                raise

    return wrapper

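# Only HTTP 5xx and 429 responses are retried, with 1s, 2s, 4s, ... backoff
# between attempts. A self-contained demonstration with an injected no-op sleep
# (the flaky function is made up for illustration):
attempts = []

def flaky():
    attempts.append(1)
    if len(attempts) < 3:
        raise urllib.error.HTTPError("https://example.com", 503, "boom", None, None)
    return "ok"

assert default_retry(flaky, sleep=lambda _: None)() == "ok"
assert len(attempts) == 3
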
@@ -5,10 +5,12 @@

import glob
import os
import pathlib
import platform
import subprocess

from spack.error import SpackError
from spack.util import windows_registry as winreg
from spack.version import Version

from ._operating_system import OperatingSystem
@@ -31,43 +33,6 @@ class WindowsOs(OperatingSystem):
    10.
    """

    # Find MSVC directories using vswhere
    comp_search_paths = []
    vs_install_paths = []
    root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
    if root:
        try:
            extra_args = {"encoding": "mbcs", "errors": "strict"}
            paths = subprocess.check_output(  # type: ignore[call-overload] # novermin
                [
                    os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"),
                    "-prerelease",
                    "-requires",
                    "Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
                    "-property",
                    "installationPath",
                    "-products",
                    "*",
                ],
                **extra_args,
            ).strip()
            vs_install_paths = paths.split("\n")
            msvc_paths = [os.path.join(path, "VC", "Tools", "MSVC") for path in vs_install_paths]
            for p in msvc_paths:
                comp_search_paths.extend(glob.glob(os.path.join(p, "*", "bin", "Hostx64", "x64")))
            if os.getenv("ONEAPI_ROOT"):
                comp_search_paths.extend(
                    glob.glob(
                        os.path.join(
                            str(os.getenv("ONEAPI_ROOT")), "compiler", "*", "windows", "bin"
                        )
                    )
                )
        except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
            pass
    if comp_search_paths:
        compiler_search_paths = comp_search_paths

    def __init__(self):
        plat_ver = windows_version()
        if plat_ver < Version("10"):
@@ -76,3 +41,71 @@ def __init__(self):

    def __str__(self):
        return self.name

    @property
    def vs_install_paths(self):
        vs_install_paths = []
        root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
        if root:
            try:
                extra_args = {"encoding": "mbcs", "errors": "strict"}
                paths = subprocess.check_output(  # type: ignore[call-overload] # novermin
                    [
                        os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"),
                        "-prerelease",
                        "-requires",
                        "Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
                        "-property",
                        "installationPath",
                        "-products",
                        "*",
                    ],
                    **extra_args,
                ).strip()
                vs_install_paths = paths.split("\n")
            except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
                pass
        return vs_install_paths

    @property
    def msvc_paths(self):
        return [os.path.join(path, "VC", "Tools", "MSVC") for path in self.vs_install_paths]

    @property
    def compiler_search_paths(self):
        # First strategy: find MSVC directories using vswhere
        _compiler_search_paths = []
        for p in self.msvc_paths:
            _compiler_search_paths.extend(glob.glob(os.path.join(p, "*", "bin", "Hostx64", "x64")))
        if os.getenv("ONEAPI_ROOT"):
            _compiler_search_paths.extend(
                glob.glob(
                    os.path.join(str(os.getenv("ONEAPI_ROOT")), "compiler", "*", "windows", "bin")
                )
            )
        # Second strategy: find MSVC via the registry
        msft = winreg.WindowsRegistryView(
            "SOFTWARE\\WOW6432Node\\Microsoft", winreg.HKEY.HKEY_LOCAL_MACHINE
        )
        vs_entries = msft.find_subkeys(r"VisualStudio_.*")
        vs_paths = []

        def clean_vs_path(path):
            path = path.split(",")[0].lstrip("@")
            return str((pathlib.Path(path).parent / "..\\..").resolve())

        for entry in vs_entries:
            try:
                val = entry.get_subkey("Capabilities").get_value("ApplicationDescription").value
                vs_paths.append(clean_vs_path(val))
            except FileNotFoundError as e:
                if hasattr(e, "winerror"):
                    if e.winerror == 2:
                        pass
                    else:
                        raise
                else:
                    raise

        _compiler_search_paths.extend(vs_paths)
        return _compiler_search_paths

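# For context on clean_vs_path above: the registry value is an "@"-prefixed
# executable path followed by a resource suffix, and the helper resolves two
# directory levels above the executable's folder to get the Visual Studio root.
# On Windows (the value below is hypothetical):
#
#     clean_vs_path("@C:\\VS\\Common7\\IDE\\devenv.exe,-1000")  # -> "C:\\VS"
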
@@ -991,13 +991,14 @@ def find_valid_url_for_version(self, version):
        return None

    def _make_resource_stage(self, root_stage, resource):
        pretty_resource_name = fsys.polite_filename(f"{resource.name}-{self.version}")
        return ResourceStage(
            resource.fetcher,
            root=root_stage,
            resource=resource,
            name=self._resource_stage(resource),
            mirror_paths=spack.mirror.mirror_archive_paths(
                resource.fetcher, os.path.join(self.name, f"{resource.name}-{self.version}")
                resource.fetcher, os.path.join(self.name, pretty_resource_name)
            ),
            path=self.path,
        )
@@ -1008,8 +1009,10 @@ def _download_search(self):

    def _make_root_stage(self, fetcher):
        # Construct a mirror path (TODO: get this out of package.py)
        format_string = "{name}-{version}"
        pretty_name = self.spec.format_path(format_string)
        mirror_paths = spack.mirror.mirror_archive_paths(
            fetcher, os.path.join(self.name, f"{self.name}-{self.version}"), self.spec
            fetcher, os.path.join(self.name, pretty_name), self.spec
        )
        # Construct a path where the stage should build..
        s = self.spec
@@ -1154,7 +1157,7 @@ def install_test_root(self):
        """Return the install test root directory."""
        tty.warn(
            "The 'pkg.install_test_root' property is deprecated with removal "
            "expected v0.21. Use 'install_test_root(pkg)' instead."
            "expected v0.22. Use 'install_test_root(pkg)' instead."
        )
        return install_test_root(self)

@@ -1805,14 +1808,7 @@ def do_install(self, **kwargs):
            verbose (bool): Display verbose build output (by default,
                suppresses it)
        """
        # Non-transitive dev specs need to keep the dev stage and be built from
        # source every time. Transitive ones just need to be built from source.
        dev_path_var = self.spec.variants.get("dev_path", None)
        if dev_path_var:
            kwargs["keep_stage"] = True

        builder = PackageInstaller([(self, kwargs)])
        builder.install()
        PackageInstaller([(self, kwargs)]).install()

        # TODO (post-34236): Update tests and all packages that use this as a
        # TODO (post-34236): package method to the routine made available to
@@ -1833,7 +1829,7 @@ def cache_extra_test_sources(self, srcs):
        """
        msg = (
            "'pkg.cache_extra_test_sources(srcs) is deprecated with removal "
            "expected in v0.21. Use 'cache_extra_test_sources(pkg, srcs)' "
            "expected in v0.22. Use 'cache_extra_test_sources(pkg, srcs)' "
            "instead."
        )
        warnings.warn(msg)

@@ -6,7 +6,7 @@

Here is the EBNF grammar for a spec::

    spec            = [name] [node_options] { ^ node } |
    spec            = [name] [node_options] { ^[edge_properties] node } |
                      [name] [node_options] hash |
                      filename

@@ -14,7 +14,8 @@
                      [name] [node_options] hash |
                      filename

    node_options    = [@(version_list|version_pair)] [%compiler] { variant }
    node_options    = [@(version_list|version_pair)] [%compiler] { variant }
    edge_properties = [ { bool_variant | key_value } ]

    hash            = / id
    filename        = (.|/|[a-zA-Z0-9-_]*/)([a-zA-Z0-9-_./]*)(.json|.yaml)

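The new edge_properties production is what lets a dependency edge carry
attributes in a spec string. Illustrative examples (package names are arbitrary):

    hdf5 ^[virtuals=mpi] mpich          # mpich satisfies the mpi virtual on this edge
    foo ^[deptypes=build,link] bar      # restrict the edge to build and link types
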
@@ -64,19 +65,21 @@

from llnl.util.tty import color

import spack.deptypes
import spack.error
import spack.spec
import spack.variant
import spack.version

IS_WINDOWS = sys.platform == "win32"
#: Valid name for specs and variants. Here we are not using
#: the previous "w[\w.-]*" since that would match most
#: characters that can be part of a word in any language
IDENTIFIER = r"([a-zA-Z_0-9][a-zA-Z_0-9\-]*)"
DOTTED_IDENTIFIER = rf"({IDENTIFIER}(\.{IDENTIFIER})+)"
GIT_HASH = r"([A-Fa-f0-9]{40})"
GIT_VERSION = rf"((git\.({DOTTED_IDENTIFIER}|{IDENTIFIER}))|({GIT_HASH}))"
IDENTIFIER = r"(?:[a-zA-Z_0-9][a-zA-Z_0-9\-]*)"
DOTTED_IDENTIFIER = rf"(?:{IDENTIFIER}(?:\.{IDENTIFIER})+)"
GIT_HASH = r"(?:[A-Fa-f0-9]{40})"
#: Git refs include branch names, and can contain "." and "/"
GIT_REF = r"(?:[a-zA-Z_0-9][a-zA-Z_0-9./\-]*)"
GIT_VERSION_PATTERN = rf"(?:(?:git\.(?:{GIT_REF}))|(?:{GIT_HASH}))"

NAME = r"[a-zA-Z_0-9][a-zA-Z_0-9\-.]*"

@@ -85,19 +88,19 @@
#: A filename starts either with a "." or a "/" or a "{name}/,
# or on Windows, a drive letter followed by a colon and "\"
# or "." or {name}\
WINDOWS_FILENAME = r"(\.|[a-zA-Z0-9-_]*\\|[a-zA-Z]:\\)([a-zA-Z0-9-_\.\\]*)(\.json|\.yaml)"
UNIX_FILENAME = r"(\.|\/|[a-zA-Z0-9-_]*\/)([a-zA-Z0-9-_\.\/]*)(\.json|\.yaml)"
WINDOWS_FILENAME = r"(?:\.|[a-zA-Z0-9-_]*\\|[a-zA-Z]:\\)(?:[a-zA-Z0-9-_\.\\]*)(?:\.json|\.yaml)"
UNIX_FILENAME = r"(?:\.|\/|[a-zA-Z0-9-_]*\/)(?:[a-zA-Z0-9-_\.\/]*)(?:\.json|\.yaml)"
if not IS_WINDOWS:
    FILENAME = UNIX_FILENAME
else:
    FILENAME = WINDOWS_FILENAME

VALUE = r"([a-zA-Z_0-9\-+\*.,:=\~\/\\]+)"
QUOTED_VALUE = r"[\"']+([a-zA-Z_0-9\-+\*.,:=\~\/\\\s]+)[\"']+"
VALUE = r"(?:[a-zA-Z_0-9\-+\*.,:=\~\/\\]+)"
QUOTED_VALUE = r"[\"']+(?:[a-zA-Z_0-9\-+\*.,:=\~\/\\\s]+)[\"']+"

VERSION = r"=?([a-zA-Z0-9_][a-zA-Z_0-9\-\.]*\b)"
VERSION_RANGE = rf"({VERSION}\s*:\s*{VERSION}(?!\s*=)|:\s*{VERSION}(?!\s*=)|{VERSION}\s*:|:)"
VERSION_LIST = rf"({VERSION_RANGE}|{VERSION})(\s*[,]\s*({VERSION_RANGE}|{VERSION}))*"
VERSION = r"=?(?:[a-zA-Z0-9_][a-zA-Z_0-9\-\.]*\b)"
VERSION_RANGE = rf"(?:(?:{VERSION})?:(?:{VERSION}(?!\s*=))?)"
VERSION_LIST = rf"(?:{VERSION_RANGE}|{VERSION})(?:\s*,\s*(?:{VERSION_RANGE}|{VERSION}))*"


class TokenBase(enum.Enum):
@@ -125,34 +128,37 @@ class TokenType(TokenBase):
    """

    # Dependency
    DEPENDENCY = r"(\^)"
    START_EDGE_PROPERTIES = r"(?:\^\[)"
    END_EDGE_PROPERTIES = r"(?:\])"
    DEPENDENCY = r"(?:\^)"
    # Version
    VERSION_HASH_PAIR = rf"(@({GIT_VERSION})=({VERSION}))"
    VERSION = rf"(@\s*({VERSION_LIST}))"
    VERSION_HASH_PAIR = rf"(?:@(?:{GIT_VERSION_PATTERN})=(?:{VERSION}))"
    GIT_VERSION = rf"@(?:{GIT_VERSION_PATTERN})"
    VERSION = rf"(?:@\s*(?:{VERSION_LIST}))"
    # Variants
    PROPAGATED_BOOL_VARIANT = rf"((\+\+|~~|--)\s*{NAME})"
    BOOL_VARIANT = rf"([~+-]\s*{NAME})"
    PROPAGATED_KEY_VALUE_PAIR = rf"({NAME}\s*==\s*({VALUE}|{QUOTED_VALUE}))"
    KEY_VALUE_PAIR = rf"({NAME}\s*=\s*({VALUE}|{QUOTED_VALUE}))"
    PROPAGATED_BOOL_VARIANT = rf"(?:(?:\+\+|~~|--)\s*{NAME})"
    BOOL_VARIANT = rf"(?:[~+-]\s*{NAME})"
    PROPAGATED_KEY_VALUE_PAIR = rf"(?:{NAME}\s*==\s*(?:{VALUE}|{QUOTED_VALUE}))"
    KEY_VALUE_PAIR = rf"(?:{NAME}\s*=\s*(?:{VALUE}|{QUOTED_VALUE}))"
    # Compilers
    COMPILER_AND_VERSION = rf"(%\s*({NAME})([\s]*)@\s*({VERSION_LIST}))"
    COMPILER = rf"(%\s*({NAME}))"
    COMPILER_AND_VERSION = rf"(?:%\s*(?:{NAME})(?:[\s]*)@\s*(?:{VERSION_LIST}))"
    COMPILER = rf"(?:%\s*(?:{NAME}))"
    # FILENAME
    FILENAME = rf"({FILENAME})"
    FILENAME = rf"(?:{FILENAME})"
    # Package name
    FULLY_QUALIFIED_PACKAGE_NAME = rf"({DOTTED_IDENTIFIER})"
    UNQUALIFIED_PACKAGE_NAME = rf"({IDENTIFIER})"
    FULLY_QUALIFIED_PACKAGE_NAME = rf"(?:{DOTTED_IDENTIFIER})"
    UNQUALIFIED_PACKAGE_NAME = rf"(?:{IDENTIFIER})"
    # DAG hash
    DAG_HASH = rf"(/({HASH}))"
    DAG_HASH = rf"(?:/(?:{HASH}))"
    # White spaces
    WS = r"(\s+)"
    WS = r"(?:\s+)"


class ErrorTokenType(TokenBase):
    """Enum with regexes for error analysis"""

    # Unexpected character
    UNEXPECTED = r"(.[\s]*)"
    UNEXPECTED = r"(?:.[\s]*)"


class Token:
@@ -161,7 +167,7 @@ class Token:
    __slots__ = "kind", "value", "start", "end"

    def __init__(
        self, kind: TokenType, value: str, start: Optional[int] = None, end: Optional[int] = None
        self, kind: TokenBase, value: str, start: Optional[int] = None, end: Optional[int] = None
    ):
        self.kind = kind
        self.value = value
@@ -261,8 +267,8 @@ def tokens(self) -> List[Token]:
        return list(filter(lambda x: x.kind != TokenType.WS, tokenize(self.literal_str)))

    def next_spec(
        self, initial_spec: Optional[spack.spec.Spec] = None
    ) -> Optional[spack.spec.Spec]:
        self, initial_spec: Optional["spack.spec.Spec"] = None
    ) -> Optional["spack.spec.Spec"]:
        """Return the next spec parsed from text.

        Args:
@@ -278,16 +284,15 @@ def next_spec(
        initial_spec = initial_spec or spack.spec.Spec()
        root_spec = SpecNodeParser(self.ctx).parse(initial_spec)
        while True:
            if self.ctx.accept(TokenType.DEPENDENCY):
                dependency = SpecNodeParser(self.ctx).parse()

                if dependency is None:
                    msg = (
                        "this dependency sigil needs to be followed by a package name "
                        "or a node attribute (version, variant, etc.)"
                    )
                    raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
            if self.ctx.accept(TokenType.START_EDGE_PROPERTIES):
                edge_properties = EdgeAttributeParser(self.ctx, self.literal_str).parse()
                edge_properties.setdefault("depflag", 0)
                edge_properties.setdefault("virtuals", ())
                dependency = self._parse_node(root_spec)
                root_spec._add_dependency(dependency, **edge_properties)

            elif self.ctx.accept(TokenType.DEPENDENCY):
                dependency = self._parse_node(root_spec)
                root_spec._add_dependency(dependency, depflag=0, virtuals=())

            else:
@@ -295,7 +300,19 @@ def next_spec(

        return root_spec

    def all_specs(self) -> List[spack.spec.Spec]:
    def _parse_node(self, root_spec):
        dependency = SpecNodeParser(self.ctx).parse()
        if dependency is None:
            msg = (
                "the dependency sigil and any optional edge attributes must be followed by a "
                "package name or a node attribute (version, variant, etc.)"
            )
            raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
        if root_spec.concrete:
            raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))
        return dependency

    def all_specs(self) -> List["spack.spec.Spec"]:
        """Return all the specs that remain to be parsed"""
        return list(iter(self.next_spec, None))

@@ -310,7 +327,9 @@ def __init__(self, ctx):
        self.has_compiler = False
        self.has_version = False

    def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spack.spec.Spec]:
    def parse(
        self, initial_spec: Optional["spack.spec.Spec"] = None
    ) -> Optional["spack.spec.Spec"]:
        """Parse a single spec node from a stream of tokens

        Args:
@@ -358,8 +377,10 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
                    compiler_name.strip(), compiler_version
                )
                self.has_compiler = True
            elif self.ctx.accept(TokenType.VERSION) or self.ctx.accept(
                TokenType.VERSION_HASH_PAIR
            elif (
                self.ctx.accept(TokenType.VERSION_HASH_PAIR)
                or self.ctx.accept(TokenType.GIT_VERSION)
                or self.ctx.accept(TokenType.VERSION)
            ):
                if self.has_version:
                    raise spack.spec.MultipleVersionError(
@@ -409,7 +430,7 @@ class FileParser:
    def __init__(self, ctx):
        self.ctx = ctx

    def parse(self, initial_spec: spack.spec.Spec) -> spack.spec.Spec:
    def parse(self, initial_spec: "spack.spec.Spec") -> "spack.spec.Spec":
        """Parse a spec tree from a specfile.

        Args:
@@ -432,7 +453,42 @@ def parse(self, initial_spec: spack.spec.Spec) -> spack.spec.Spec:
        return initial_spec


def parse(text: str) -> List[spack.spec.Spec]:
class EdgeAttributeParser:
    __slots__ = "ctx", "literal_str"

    def __init__(self, ctx, literal_str):
        self.ctx = ctx
        self.literal_str = literal_str

    def parse(self):
        attributes = {}
        while True:
            if self.ctx.accept(TokenType.KEY_VALUE_PAIR):
                name, value = self.ctx.current_token.value.split("=", maxsplit=1)
                name = name.strip("'\" ")
                value = value.strip("'\" ").split(",")
                attributes[name] = value
                if name not in ("deptypes", "virtuals"):
                    msg = (
                        "the only edge attributes that are currently accepted "
                        'are "deptypes" and "virtuals"'
                    )
                    raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
            # TODO: Add code to accept bool variants here as soon as use variants are implemented
            elif self.ctx.accept(TokenType.END_EDGE_PROPERTIES):
                break
            else:
                msg = "unexpected token in edge attributes"
                raise SpecParsingError(msg, self.ctx.next_token, self.literal_str)

        # Turn deptypes=... into the depflag representation
        if "deptypes" in attributes:
            deptype_string = attributes.pop("deptypes")
            attributes["depflag"] = spack.deptypes.canonicalize(deptype_string)
        return attributes


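# A hedged sketch of the result: "deptypes" is popped and canonicalized into the
# "depflag" bitmask, "virtuals" stays a list of names, and next_spec above passes
# both to Spec._add_dependency. Assuming this module is importable as spack.parser
# (the spec string is illustrative):
import spack.parser

spec = spack.parser.parse_one_or_raise("foo ^[deptypes=build,link] bar")
# The edge to "bar" now carries only the build and link dependency types.

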
def parse(text: str) -> List["spack.spec.Spec"]:
    """Parse text into a list of specs

    Args:
@@ -445,8 +501,8 @@ def parse(text: str) -> List[spack.spec.Spec]:


def parse_one_or_raise(
    text: str, initial_spec: Optional[spack.spec.Spec] = None
) -> spack.spec.Spec:
    text: str, initial_spec: Optional["spack.spec.Spec"] = None
) -> "spack.spec.Spec":
    """Parse exactly one spec from text and return it, or raise

    Args:

@@ -7,6 +7,7 @@
import inspect
import os
import os.path
import pathlib
import sys

import llnl.util.filesystem
@@ -36,10 +37,12 @@ def apply_patch(stage, patch_path, level=1, working_dir="."):
    """
    git_utils_path = os.environ.get("PATH", "")
    if sys.platform == "win32":
        git = which_string("git", required=True)
        git_root = git.split("\\")[:-2]
        git_root.extend(["usr", "bin"])
        git_utils_path = os.sep.join(git_root)
        git = which_string("git")
        if git:
            git = pathlib.Path(git)
            git_root = git.parent.parent
            git_root = git_root / "usr" / "bin"
            git_utils_path = os.pathsep.join([str(git_root), git_utils_path])

    # TODO: Decouple Spack's patch support on Windows from Git
    # for Windows, and instead have Spack directly fetch, install, and
@@ -312,21 +315,19 @@ def from_json(cls, stream, repository):
    def to_json(self, stream):
        sjson.dump({"patches": self.index}, stream)

    def patch_for_package(self, sha256, pkg):
    def patch_for_package(self, sha256: str, pkg):
        """Look up a patch in the index and build a patch object for it.

        Arguments:
            sha256 (str): sha256 hash to look up
            sha256: sha256 hash to look up
            pkg (spack.package_base.PackageBase): Package object to get patch for.

        We build patch objects lazily because building them requires that
        we have information about the package's location in its repo.

        """
        we have information about the package's location in its repo."""
        sha_index = self.index.get(sha256)
        if not sha_index:
            raise NoSuchPatchError(
                "Couldn't find patch for package %s with sha256: %s" % (pkg.fullname, sha256)
            raise PatchLookupError(
                f"Couldn't find patch for package {pkg.fullname} with sha256: {sha256}"
            )

        # Find patches for this class or any class it inherits from
@@ -335,8 +336,8 @@ def patch_for_package(self, sha256, pkg):
            if patch_dict:
                break
        else:
            raise NoSuchPatchError(
                "Couldn't find patch for package %s with sha256: %s" % (pkg.fullname, sha256)
            raise PatchLookupError(
                f"Couldn't find patch for package {pkg.fullname} with sha256: {sha256}"
            )

        # add the sha256 back (we take it out on write to save space,
@@ -405,5 +406,9 @@ class NoSuchPatchError(spack.error.SpackError):
    """Raised when a patch file doesn't exist."""


class PatchLookupError(NoSuchPatchError):
    """Raised when a patch file cannot be located from sha256."""


class PatchDirectiveError(spack.error.SpackError):
    """Raised when the wrong arguments are supplied to the patch directive."""

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Classes and functions to manage providers of virtual dependencies"""
import itertools
from typing import Dict, List, Optional, Set

import spack.error
@@ -11,33 +10,6 @@
import spack.util.spack_json as sjson


def _cross_provider_maps(lmap, rmap):
    """Return a dictionary that combines constraint requests from both inputs.

    Args:
        lmap: main provider map
        rmap: provider map with additional constraints
    """
    # TODO: this is pretty darned nasty, and inefficient, but there
    # TODO: are not that many vdeps in most specs.
    result = {}
    for lspec, rspec in itertools.product(lmap, rmap):
        try:
            constrained = lspec.constrained(rspec)
        except spack.error.UnsatisfiableSpecError:
            continue

        # lp and rp are left and right provider specs.
        for lp_spec, rp_spec in itertools.product(lmap[lspec], rmap[rspec]):
            if lp_spec.name == rp_spec.name:
                try:
                    const = lp_spec.constrained(rp_spec, deps=False)
                    result.setdefault(constrained, set()).add(const)
                except spack.error.UnsatisfiableSpecError:
                    continue
    return result


class _IndexBase:
    #: This is a dict of dicts used for finding providers of particular
    #: virtual dependencies. The dict of dicts looks like:
@@ -81,29 +53,6 @@ def providers_for(self, virtual_spec):
    def __contains__(self, name):
        return name in self.providers

    def satisfies(self, other):
        """Determine if the providers of virtual specs are compatible.

        Args:
            other: another provider index

        Returns:
            True if the providers are compatible, False otherwise.
        """
        common = set(self.providers) & set(other.providers)
        if not common:
            return True

        # This ensures that some provider in other COULD satisfy the
        # vpkg constraints on self.
        result = {}
        for name in common:
            crossed = _cross_provider_maps(self.providers[name], other.providers[name])
            if crossed:
                result[name] = crossed

        return all(c in result for c in common)

    def __eq__(self, other):
        return self.providers == other.providers

@@ -141,6 +141,7 @@
    }
)

# TODO: Remove in Spack 0.23
ci_properties = {
    "anyOf": [
        {
@@ -166,6 +167,7 @@
properties = {
    "ci": {
        "oneOf": [
            # TODO: Replace with core-shared-properties in Spack 0.23
            ci_properties,
            # Allow legacy format under `ci` for `config update ci`
            spack.schema.gitlab_ci.gitlab_ci_properties,
Some files were not shown because too many files have changed in this diff.