# Compare commits

**412 commits**, `features/r` ... `develop-20`
*(Commit list: 412 entries, `e9ab82ff55` through `450f938056`.)*
**`.github/workflows/audit.yaml`** (2 changes, vendored)

```diff
@@ -23,7 +23,7 @@ jobs:
         operating_system: ["ubuntu-latest", "macos-latest"]
     steps:
       - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
-      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
+      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
        with:
          python-version: ${{inputs.python_version}}
      - name: Install Python packages
```
**`.github/workflows/bootstrap.yml`** (2 changes, vendored)

```diff
@@ -159,7 +159,7 @@ jobs:
           brew install cmake bison@2.7 tree
       - name: Checkout
         uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
-      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
+      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
        with:
          python-version: "3.12"
      - name: Bootstrap clingo
```
**`.github/workflows/build-containers.yml`** (2 changes, vendored)

```diff
@@ -57,7 +57,7 @@ jobs:
       - name: Checkout
         uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
 
-      - uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934
+      - uses: docker/metadata-action@9dc751fe249ad99385a2583ee0d084c400eee04e
        id: docker_meta
        with:
          images: |
```
**`.github/workflows/nightly-win-builds.yml`** (2 changes, vendored)

```diff
@@ -17,7 +17,7 @@ jobs:
       - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
+      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
        with:
          python-version: 3.9
      - name: Install Python packages
```
**`.github/workflows/style/requirements.txt`** (6 changes, vendored)

```diff
@@ -1,7 +1,7 @@
-black==23.11.0
+black==23.12.1
 clingo==5.6.2
 flake8==6.1.0
-isort==5.12.0
-mypy==1.6.1
+isort==5.13.2
+mypy==1.8.0
 types-six==1.16.21.9
 vermin==1.6.0
```
**`.github/workflows/unit_tests.yaml`** (8 changes, vendored)

```diff
@@ -54,7 +54,7 @@ jobs:
       - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
+      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install System packages
@@ -101,7 +101,7 @@ jobs:
       - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
+      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
        with:
          python-version: '3.11'
      - name: Install System packages
@@ -159,7 +159,7 @@ jobs:
       - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
+      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
        with:
          python-version: '3.11'
      - name: Install System packages
@@ -194,7 +194,7 @@ jobs:
       - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
+      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install Python packages
```
**`.github/workflows/valid-style.yml`** (4 changes, vendored)

```diff
@@ -19,7 +19,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
-      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
+      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
        with:
          python-version: '3.11'
          cache: 'pip'
@@ -38,7 +38,7 @@ jobs:
       - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
+      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
        with:
          python-version: '3.11'
          cache: 'pip'
```
**`.github/workflows/windows_python.yml`** (6 changes, vendored)

```diff
@@ -18,7 +18,7 @@ jobs:
       - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
+      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -42,7 +42,7 @@ jobs:
       - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
+      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -66,7 +66,7 @@ jobs:
       - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
+      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
        with:
          python-version: 3.9
      - name: Install Python packages
```
**`CITATION.cff`** (20 changes)

```diff
@@ -31,13 +31,17 @@ type: software
 message: "If you are referencing Spack in a publication, please cite the paper below."
 title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
 abstract: >-
-  Large HPC centers spend considerable time supporting software for thousands of users, but the complexity of HPC software is quickly outpacing the capabilities of existing software management tools.
-  Scientific applications require specific versions of compilers, MPI, and other dependency libraries, so using a single, standard software stack is infeasible.
-  However, managing many configurations is difficult because the configuration space is combinatorial in size.
-  We introduce Spack, a tool used at Lawrence Livermore National Laboratory to manage this complexity.
-  Spack provides a novel, re- cursive specification syntax to invoke parametric builds of packages and dependencies.
-  It allows any number of builds to coexist on the same system, and it ensures that installed packages can find their dependencies, regardless of the environment.
-  We show through real-world use cases that Spack supports diverse and demanding applications, bringing order to HPC software chaos.
+  Large HPC centers spend considerable time supporting software for thousands of users, but the
+  complexity of HPC software is quickly outpacing the capabilities of existing software management
+  tools. Scientific applications require specific versions of compilers, MPI, and other dependency
+  libraries, so using a single, standard software stack is infeasible. However, managing many
+  configurations is difficult because the configuration space is combinatorial in size. We
+  introduce Spack, a tool used at Lawrence Livermore National Laboratory to manage this complexity.
+  Spack provides a novel, re- cursive specification syntax to invoke parametric builds of packages
+  and dependencies. It allows any number of builds to coexist on the same system, and it ensures
+  that installed packages can find their dependencies, regardless of the environment. We show
+  through real-world use cases that Spack supports diverse and demanding applications, bringing
+  order to HPC software chaos.
 preferred-citation:
   title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
   type: conference-paper
@@ -71,7 +75,7 @@ preferred-citation:
       type: doi
       value: 10.1145/2807591.2807623
     - description: "The DOE Document Release Number of the work"
-     type: other
+      type: other
       value: "LLNL-CONF-669890"
   authors:
   - family-names: "Gamblin"
```
**`README.md`** (44 changes)

```diff
@@ -1,13 +1,34 @@
-# <img src="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo.svg" width="64" valign="middle" alt="Spack"/> Spack
 <div align="left">
 
-[](https://github.com/spack/spack/actions)
-[](https://github.com/spack/spack/actions/workflows/bootstrap.yml)
-[](https://codecov.io/gh/spack/spack)
-[](https://github.com/spack/spack/actions/workflows/build-containers.yml)
-[](https://spack.readthedocs.io)
-[](https://github.com/psf/black)
-[](https://slack.spack.io)
-[](https://matrix.to/#/#spack-space:matrix.org)
+<h2>
+  <picture>
+    <source media="(prefers-color-scheme: dark)" srcset="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo-white-text.svg" width="250">
+    <source media="(prefers-color-scheme: light)" srcset="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo-text.svg" width="250">
+    <img alt="Spack" src="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo-text.svg" width="250">
+  </picture>
+
+  <br>
+  <br clear="all">
+
+  <a href="https://github.com/spack/spack/actions/workflows/ci.yml"><img src="https://github.com/spack/spack/workflows/ci/badge.svg" alt="CI Status"></a>
+  <a href="https://github.com/spack/spack/actions/workflows/bootstrapping.yml"><img src="https://github.com/spack/spack/actions/workflows/bootstrap.yml/badge.svg" alt="Bootstrap Status"></a>
+  <a href="https://github.com/spack/spack/actions/workflows/build-containers.yml"><img src="https://github.com/spack/spack/actions/workflows/build-containers.yml/badge.svg" alt="Containers Status"></a>
+  <a href="https://spack.readthedocs.io"><img src="https://readthedocs.org/projects/spack/badge/?version=latest" alt="Documentation Status"></a>
+  <a href="https://codecov.io/gh/spack/spack"><img src="https://codecov.io/gh/spack/spack/branch/develop/graph/badge.svg" alt="Code coverage"/></a>
+  <a href="https://slack.spack.io"><img src="https://slack.spack.io/badge.svg" alt="Slack"/></a>
+  <a href="https://matrix.to/#/#spack-space:matrix.org"><img src="https://img.shields.io/matrix/spack-space%3Amatrix.org?label=matrix" alt="Matrix"/></a>
+
+</h2>
+
+**[Getting Started] • [Config] • [Community] • [Contributing] • [Packaging Guide]**
+
+[Getting Started]: https://spack.readthedocs.io/en/latest/getting_started.html
+[Config]: https://spack.readthedocs.io/en/latest/configuration.html
+[Community]: #community
+[Contributing]: https://spack.readthedocs.io/en/latest/contribution_guide.html
+[Packaging Guide]: https://spack.readthedocs.io/en/latest/packaging_guide.html
+
 </div>
 
 Spack is a multi-platform package manager that builds and installs
 multiple versions and configurations of software. It works on Linux,
@@ -66,10 +87,11 @@
 * **Matrix space**: [#spack-space:matrix.org](https://matrix.to/#/#spack-space:matrix.org):
   [bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
 * [**Github Discussions**](https://github.com/spack/spack/discussions):
   not just for discussions, but also Q&A.
-* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack)
-  for Q&A and discussions. Note the pinned discussions for announcements.
 * **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
   `@mention` us!
+* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack):
+  only for announcements. Please use other venues for discussions.
 
 Contributing
 ------------------------
```
**Docs: build caches**

```diff
@@ -153,7 +153,43 @@ keyring, and trusting all downloaded keys.
 List of popular build caches
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-* `Extreme-scale Scientific Software Stack (E4S) <https://e4s-project.github.io/>`_: `build cache <https://oaciss.uoregon.edu/e4s/inventory.html>`_
+* `Extreme-scale Scientific Software Stack (E4S) <https://e4s-project.github.io/>`_: `build cache <https://oaciss.uoregon.edu/e4s/inventory.html>`_'
+
+-------------------
+Build cache signing
+-------------------
+
+By default, Spack will add a cryptographic signature to each package pushed to
+a build cache, and verifies the signature when installing from a build cache.
+
+Keys for signing can be managed with the :ref:`spack gpg <cmd-spack-gpg>` command,
+as well as ``spack buildcache keys`` as mentioned above.
+
+You can disable signing when pushing with ``spack buildcache push --unsigned``,
+and disable verification when installing from any build cache with
+``spack install --no-check-signature``.
+
+Alternatively, signing and verification can be enabled or disabled on a per build cache
+basis:
+
+.. code-block:: console
+
+   $ spack mirror add --signed <name> <url>    # enable signing and verification
+   $ spack mirror add --unsigned <name> <url>  # disable signing and verification
+
+   $ spack mirror set --signed <name>    # enable signing and verification for an existing mirror
+   $ spack mirror set --unsigned <name>  # disable signing and verification for an existing mirror
+
+Or you can directly edit the ``mirrors.yaml`` configuration file:
+
+.. code-block:: yaml
+
+   mirrors:
+     <name>:
+       url: <url>
+       signed: false # disable signing and verification
+
+See also :ref:`mirrors`.
 
 ----------
 Relocation
@@ -251,87 +287,13 @@ To significantly speed up Spack in GitHub Actions, binaries can be cached in
 GitHub Packages. This service is an OCI registry that can be linked to a GitHub
 repository.
 
-A typical workflow is to include a ``spack.yaml`` environment in your repository
-that specifies the packages to install, the target architecture, and the build
-cache to use under ``mirrors``:
-
-.. code-block:: yaml
-
-   spack:
-     specs:
-     - python@3.11
-     config:
-       install_tree:
-         root: /opt/spack
-         padded_length: 128
-     packages:
-       all:
-         require: target=x86_64_v2
-     mirrors:
-       local-buildcache: oci://ghcr.io/<organization>/<repository>
-
-A GitHub action can then be used to install the packages and push them to the
-build cache:
-
-.. code-block:: yaml
-
-   name: Install Spack packages
-
-   on: push
-
-   env:
-     SPACK_COLOR: always
-
-   jobs:
-     example:
-       runs-on: ubuntu-22.04
-       permissions:
-         packages: write
-       steps:
-       - name: Checkout
-         uses: actions/checkout@v3
-
-       - name: Checkout Spack
-         uses: actions/checkout@v3
-         with:
-           repository: spack/spack
-           path: spack
-
-       - name: Setup Spack
-         run: echo "$PWD/spack/bin" >> "$GITHUB_PATH"
-
-       - name: Concretize
-         run: spack -e . concretize
-
-       - name: Install
-         run: spack -e . install --no-check-signature
-
-       - name: Run tests
-         run: ./my_view/bin/python3 -c 'print("hello world")'
-
-       - name: Push to buildcache
-         run: |
-           spack -e . mirror set --oci-username ${{ github.actor }} --oci-password "${{ secrets.GITHUB_TOKEN }}" local-buildcache
-           spack -e . buildcache push --base-image ubuntu:22.04 --unsigned --update-index local-buildcache
-         if: ${{ !cancelled() }}
-
-The first time this action runs, it will build the packages from source and
-push them to the build cache. Subsequent runs will pull the binaries from the
-build cache. The concretizer will ensure that prebuilt binaries are favored
-over source builds.
-
-The build cache entries appear in the GitHub Packages section of your repository,
-and contain instructions for pulling and running them with ``docker`` or ``podman``.
-
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Using Spack's public build cache for GitHub Actions
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 Spack offers a public build cache for GitHub Actions with a set of common packages,
 which lets you get started quickly. See the following resources for more information:
 
-* `spack/github-actions-buildcache <https://github.com/spack/github-actions-buildcache>`_
+* `spack/setup-spack <https://github.com/spack/setup-spack>`_ for setting up Spack in GitHub
+  Actions
+* `spack/github-actions-buildcache <https://github.com/spack/github-actions-buildcache>`_ for
+  more details on the public build cache
 
 .. _cmd-spack-buildcache:
```
**Docs: CMake build system (`CMakePackage`)**

```diff
@@ -82,7 +82,7 @@ class already contains:
 
 .. code-block:: python
 
-   depends_on('cmake', type='build')
+   depends_on("cmake", type="build")
 
 
 If you need to specify a particular version requirement, you can
@@ -90,7 +90,7 @@ override this in your package:
 
 .. code-block:: python
 
-   depends_on('cmake@2.8.12:', type='build')
+   depends_on("cmake@2.8.12:", type="build")
 
 
 ^^^^^^^^^^^^^^^^^^^
@@ -137,10 +137,10 @@ and without the :meth:`~spack.build_systems.cmake.CMakeBuilder.define` and
 
    def cmake_args(self):
        args = [
-          '-DWHATEVER:STRING=somevalue',
-          self.define('ENABLE_BROKEN_FEATURE', False),
-          self.define_from_variant('DETECT_HDF5', 'hdf5'),
-          self.define_from_variant('THREADS'), # True if +threads
+          "-DWHATEVER:STRING=somevalue",
+          self.define("ENABLE_BROKEN_FEATURE", False),
+          self.define_from_variant("DETECT_HDF5", "hdf5"),
+          self.define_from_variant("THREADS"), # True if +threads
        ]
 
        return args
@@ -151,10 +151,10 @@ and CMake simply ignores the empty command line argument. For example the follow
 
 .. code-block:: python
 
-   variant('example', default=True, when='@2.0:')
+   variant("example", default=True, when="@2.0:")
 
    def cmake_args(self):
-       return [self.define_from_variant('EXAMPLE', 'example')]
+       return [self.define_from_variant("EXAMPLE", "example")]
 
 will generate ``'cmake' '-DEXAMPLE=ON' ...`` when `@2.0: +example` is met, but will
 result in ``'cmake' '' ...`` when the spec version is below ``2.0``.
@@ -193,9 +193,9 @@ a variant to control this:
 
 .. code-block:: python
 
-   variant('build_type', default='RelWithDebInfo',
-           description='CMake build type',
-           values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))
+   variant("build_type", default="RelWithDebInfo",
+           description="CMake build type",
+           values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"))
 
 However, not every CMake package accepts all four of these options.
 Grep the ``CMakeLists.txt`` file to see if the default values are
@@ -205,9 +205,9 @@ package overrides the default variant with:
 
 .. code-block:: python
 
-   variant('build_type', default='DebugRelease',
-           description='The build type to build',
-           values=('Debug', 'Release', 'DebugRelease'))
+   variant("build_type", default="DebugRelease",
+           description="The build type to build",
+           values=("Debug", "Release", "DebugRelease"))
 
 For more information on ``CMAKE_BUILD_TYPE``, see:
 https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
@@ -250,7 +250,7 @@ generator is Ninja. To switch to the Ninja generator, simply add:
 
 .. code-block:: python
 
-   generator = 'Ninja'
+   generator = "Ninja"
 
 
 ``CMakePackage`` defaults to "Unix Makefiles". If you switch to the
@@ -258,7 +258,7 @@ Ninja generator, make sure to add:
 
 .. code-block:: python
 
-   depends_on('ninja', type='build')
+   depends_on("ninja", type="build")
 
 to the package as well. Aside from that, you shouldn't need to do
 anything else. Spack will automatically detect that you are using
@@ -288,7 +288,7 @@ like so:
 
 .. code-block:: python
 
-   root_cmakelists_dir = 'src'
+   root_cmakelists_dir = "src"
 
 
 Note that this path is relative to the root of the extracted tarball,
@@ -304,7 +304,7 @@ different sub-directory, simply override ``build_directory`` like so:
 
 .. code-block:: python
 
-   build_directory = 'my-build'
+   build_directory = "my-build"
 
 ^^^^^^^^^^^^^^^^^^^^^^^^^
 Build and install targets
@@ -324,8 +324,8 @@ library or build the documentation, you can add these like so:
 
 .. code-block:: python
 
-   build_targets = ['all', 'docs']
-   install_targets = ['install', 'docs']
+   build_targets = ["all", "docs"]
+   install_targets = ["install", "docs"]
 
 ^^^^^^^
 Testing
```
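For context, the ``define`` and ``define_from_variant`` helpers touched above are typically combined in a single ``cmake_args`` method. The sketch below is illustrative only and not part of the diff; the package name, variants, and CMake option names are all invented:

```python
# Hypothetical CMake-based package; ExampleLib, its variants, and its
# CMake options are invented for illustration.
from spack.package import *


class ExampleLib(CMakePackage):
    """Example library built with CMake (illustrative sketch)."""

    homepage = "https://example.com/example-lib"

    variant("hdf5", default=False, description="Enable HDF5 support")
    variant("threads", default=True, description="Build with threading")

    depends_on("cmake@3.18:", type="build")
    depends_on("hdf5", when="+hdf5")

    def cmake_args(self):
        return [
            # self.define renders a typed flag such as -DBUILD_TESTING:BOOL=ON.
            self.define("BUILD_TESTING", self.run_tests),
            # Tied to the +hdf5 variant: -DDETECT_HDF5:BOOL=ON or OFF.
            self.define_from_variant("DETECT_HDF5", "hdf5"),
            # With one argument, the variant name is derived from the option name.
            self.define_from_variant("THREADS"),
        ]
```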
**Docs: Intel tools and the virtual `mkl` package**

```diff
@@ -90,7 +90,7 @@ and optimizers do require a paid license. In Spack, they are packaged as:
 TODO: Confirm and possible change(!) the scope of MPI components (runtime
 vs. devel) in current (and previous?) *cluster/professional/composer*
 editions, i.e., presence in downloads, possibly subject to license
-coverage(!); see `disussion in PR #4300
+coverage(!); see `discussion in PR #4300
 <https://github.com/spack/spack/pull/4300#issuecomment-305582898>`_. [NB:
 An "mpi" subdirectory is not indicative of the full MPI SDK being present
 (i.e., ``mpicc``, ..., and header files). The directory may just as well
@@ -934,9 +934,9 @@ a *virtual* ``mkl`` package is declared in Spack.
 
 .. code-block:: python
 
       # Examples for absolute and conditional dependencies:
-      depends_on('mkl')
-      depends_on('mkl', when='+mkl')
-      depends_on('mkl', when='fftw=mkl')
+      depends_on("mkl")
+      depends_on("mkl", when="+mkl")
+      depends_on("mkl", when="fftw=mkl")
 
 The ``MKLROOT`` environment variable (part of the documented API) will be set
 during all stages of client package installation, and is available to both
@@ -972,8 +972,8 @@ a *virtual* ``mkl`` package is declared in Spack.
       def configure_args(self):
           args = []
           ...
-          args.append('--with-blas=%s' % self.spec['blas'].libs.ld_flags)
-          args.append('--with-lapack=%s' % self.spec['lapack'].libs.ld_flags)
+          args.append("--with-blas=%s" % self.spec["blas"].libs.ld_flags)
+          args.append("--with-lapack=%s" % self.spec["lapack"].libs.ld_flags)
           ...
 
 .. tip::
@@ -989,13 +989,13 @@ a *virtual* ``mkl`` package is declared in Spack.
 
 .. code-block:: python
 
-      self.spec['blas'].headers.include_flags
+      self.spec["blas"].headers.include_flags
 
 and to generate linker options (``-L<dir> -llibname ...``), use the same as above,
 
 .. code-block:: python
 
-      self.spec['blas'].libs.ld_flags
+      self.spec["blas"].libs.ld_flags
 
 See
 :ref:`MakefilePackage <makefilepackage>`
```
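For orientation, the fragments above might fit together in a client package of the virtual ``blas``/``lapack``/``mkl`` providers as follows. This is a sketch under stated assumptions: the package name, its variant, and the ``--with-*`` flags are invented for illustration.

```python
# Hypothetical client of the virtual blas/lapack providers; the package
# name and the configure flags are invented for illustration.
from spack.package import *


class ExampleSolver(AutotoolsPackage):
    """Example solver linking against virtual blas/lapack (illustrative)."""

    variant("mkl", default=False, description="Prefer MKL as the blas/lapack provider")

    depends_on("blas")
    depends_on("lapack")
    depends_on("mkl", when="+mkl")

    def configure_args(self):
        spec = self.spec
        return [
            # ld_flags expands to "-L<dir> -l<name> ..." for the chosen provider.
            "--with-blas=%s" % spec["blas"].libs.ld_flags,
            "--with-lapack=%s" % spec["lapack"].libs.ld_flags,
            # include_flags expands to "-I<dir> ..." for compilation.
            "CPPFLAGS=%s" % spec["blas"].headers.include_flags,
        ]
```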
**Docs: Lua/LuaRocks build system**

```diff
@@ -88,7 +88,7 @@ override the ``luarocks_args`` method like so:
 .. code-block:: python
 
    def luarocks_args(self):
-       return ['flag1', 'flag2']
+       return ["flag1", "flag2"]
 
 One common use of this is to override warnings or flags for newer compilers, as in:
```
**Docs: Maven build system**

```diff
@@ -48,8 +48,8 @@ class automatically adds the following dependencies:
 
 .. code-block:: python
 
-   depends_on('java', type=('build', 'run'))
-   depends_on('maven', type='build')
+   depends_on("java", type=("build", "run"))
+   depends_on("maven", type="build")
 
 
 In the ``pom.xml`` file, you may see sections like:
@@ -72,8 +72,8 @@ should add:
 
 .. code-block:: python
 
-   depends_on('java@7:', type='build')
-   depends_on('maven@3.5.4:', type='build')
+   depends_on("java@7:", type="build")
+   depends_on("maven@3.5.4:", type="build")
 
 
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -88,9 +88,9 @@ the build phase. For example:
 
    def build_args(self):
        return [
-          '-Pdist,native',
-          '-Dtar',
-          '-Dmaven.javadoc.skip=true'
+          "-Pdist,native",
+          "-Dtar",
+          "-Dmaven.javadoc.skip=true"
        ]
```
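A minimal sketch pulling these pieces together in one ``MavenPackage`` follows; the package name, homepage, and build flags are illustrative, not taken from the diff:

```python
# Hypothetical Maven-built application; all names are invented.
from spack.package import *


class ExampleMavenApp(MavenPackage):
    """Example application built with Maven (illustrative sketch)."""

    homepage = "https://example.com/app"

    depends_on("java@7:", type="build")
    depends_on("maven@3.5.4:", type="build")

    def build_args(self):
        # Extra flags appended to the `mvn package` invocation; skipping
        # javadoc generation is a common way to speed up the build.
        return ["-Pdist,native", "-Dtar", "-Dmaven.javadoc.skip=true"]
```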
**Docs: Meson build system**

```diff
@@ -86,8 +86,8 @@ the ``MesonPackage`` base class already contains:
 
 .. code-block:: python
 
-   depends_on('meson', type='build')
-   depends_on('ninja', type='build')
+   depends_on("meson", type="build")
+   depends_on("ninja", type="build")
 
 
 If you need to specify a particular version requirement, you can
@@ -95,8 +95,8 @@ override this in your package:
 
 .. code-block:: python
 
-   depends_on('meson@0.43.0:', type='build')
-   depends_on('ninja', type='build')
+   depends_on("meson@0.43.0:", type="build")
+   depends_on("ninja", type="build")
 
 
 ^^^^^^^^^^^^^^^^^^^
@@ -121,7 +121,7 @@ override the ``meson_args`` method like so:
 
 .. code-block:: python
 
    def meson_args(self):
-       return ['--warnlevel=3']
+       return ["--warnlevel=3"]
 
 
 This method can be used to pass flags as well as variables.
```
**Docs: Perl build system**

```diff
@@ -118,7 +118,7 @@ so ``PerlPackage`` contains:
 
 .. code-block:: python
 
-   extends('perl')
+   extends("perl")
 
 
 If your package requires a specific version of Perl, you should
@@ -132,14 +132,14 @@ properly. If your package uses ``Makefile.PL`` to build, add:
 
 .. code-block:: python
 
-   depends_on('perl-extutils-makemaker', type='build')
+   depends_on("perl-extutils-makemaker", type="build")
 
 
 If your package uses ``Build.PL`` to build, add:
 
 .. code-block:: python
 
-   depends_on('perl-module-build', type='build')
+   depends_on("perl-module-build", type="build")
 
 
 ^^^^^^^^^^^^^^^^^
@@ -165,11 +165,11 @@ arguments to ``Makefile.PL`` or ``Build.PL`` by overriding
 .. code-block:: python
 
    def configure_args(self):
-       expat = self.spec['expat'].prefix
+       expat = self.spec["expat"].prefix
 
        return [
-           'EXPATLIBPATH={0}'.format(expat.lib),
-           'EXPATINCPATH={0}'.format(expat.include),
+           "EXPATLIBPATH={0}".format(expat.lib),
+           "EXPATINCPATH={0}".format(expat.include),
        ]
```
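Combined, a complete ``PerlPackage`` along these lines might look like the sketch below; the distribution name and URL are invented for illustration:

```python
# Hypothetical CPAN distribution built with Makefile.PL; the package
# name and URL are invented for illustration.
from spack.package import *


class PerlExampleModule(PerlPackage):
    """Example CPAN distribution (illustrative sketch)."""

    homepage = "https://metacpan.org/pod/Example::Module"
    url = "https://cpan.metacpan.org/authors/id/E/EX/EXAMPLE/Example-Module-1.00.tar.gz"

    depends_on("perl-extutils-makemaker", type="build")
    depends_on("expat")

    def configure_args(self):
        expat = self.spec["expat"].prefix
        # Point Makefile.PL at the Spack-provided expat installation.
        return [
            "EXPATLIBPATH={0}".format(expat.lib),
            "EXPATINCPATH={0}".format(expat.include),
        ]
```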
**Docs: QMake build system**

```diff
@@ -83,7 +83,7 @@ base class already contains:
 
 .. code-block:: python
 
-   depends_on('qt', type='build')
+   depends_on("qt", type="build")
 
 
 If you want to specify a particular version requirement, or need to
@@ -91,7 +91,7 @@ link to the ``qt`` libraries, you can override this in your package:
 
 .. code-block:: python
 
-   depends_on('qt@5.6.0:')
+   depends_on("qt@5.6.0:")
 
 ^^^^^^^^^^^^^^^^^^^^^^^^^^
 Passing arguments to qmake
@@ -103,7 +103,7 @@ override the ``qmake_args`` method like so:
 
 .. code-block:: python
 
    def qmake_args(self):
-       return ['-recursive']
+       return ["-recursive"]
 
 
 This method can be used to pass flags as well as variables.
@@ -118,7 +118,7 @@ sub-directory by adding the following to the package:
 
 .. code-block:: python
 
-   build_directory = 'src'
+   build_directory = "src"
 
 
 ^^^^^^^^^^^^^^^^^^^^^^
```
**Docs: R build system**

```diff
@@ -163,28 +163,28 @@ attributes that can be used to set ``homepage``, ``url``, ``list_url``, and
 
 .. code-block:: python
 
-   cran = 'caret'
+   cran = "caret"
 
 is equivalent to:
 
 .. code-block:: python
 
-   homepage = 'https://cloud.r-project.org/package=caret'
-   url = 'https://cloud.r-project.org/src/contrib/caret_6.0-86.tar.gz'
-   list_url = 'https://cloud.r-project.org/src/contrib/Archive/caret'
+   homepage = "https://cloud.r-project.org/package=caret"
+   url = "https://cloud.r-project.org/src/contrib/caret_6.0-86.tar.gz"
+   list_url = "https://cloud.r-project.org/src/contrib/Archive/caret"
 
 Likewise, the following ``bioc`` attribute:
 
 .. code-block:: python
 
-   bioc = 'BiocVersion'
+   bioc = "BiocVersion"
 
 is equivalent to:
 
 .. code-block:: python
 
-   homepage = 'https://bioconductor.org/packages/BiocVersion/'
-   git = 'https://git.bioconductor.org/packages/BiocVersion'
+   homepage = "https://bioconductor.org/packages/BiocVersion/"
+   git = "https://git.bioconductor.org/packages/BiocVersion"
 
 
 ^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -200,7 +200,7 @@ base class contains:
 
 .. code-block:: python
 
-   extends('r')
+   extends("r")
 
 
 Take a close look at the homepage for ``caret``. If you look at the
@@ -209,7 +209,7 @@ You should add this to your package like so:
 
 .. code-block:: python
 
-   depends_on('r@3.2.0:', type=('build', 'run'))
+   depends_on("r@3.2.0:", type=("build", "run"))
 
 
 ^^^^^^^^^^^^^^
@@ -227,7 +227,7 @@ and list all of their dependencies in the following sections:
 * LinkingTo
 
 As far as Spack is concerned, all 3 of these dependency types
-correspond to ``type=('build', 'run')``, so you don't have to worry
+correspond to ``type=("build", "run")``, so you don't have to worry
 about the details. If you are curious what they mean,
 https://github.com/spack/spack/issues/2951 has a pretty good summary:
 
@@ -330,7 +330,7 @@ the dependency:
 
 .. code-block:: python
 
-   depends_on('r-lattice@0.20:', type=('build', 'run'))
+   depends_on("r-lattice@0.20:", type=("build", "run"))
 
 
 ^^^^^^^^^^^^^^^^^^
@@ -361,20 +361,20 @@ like so:
 .. code-block:: python
 
    def configure_args(self):
-       mpi_name = self.spec['mpi'].name
+       mpi_name = self.spec["mpi"].name
 
        # The type of MPI. Supported values are:
        # OPENMPI, LAM, MPICH, MPICH2, or CRAY
-       if mpi_name == 'openmpi':
-           Rmpi_type = 'OPENMPI'
-       elif mpi_name == 'mpich':
-           Rmpi_type = 'MPICH2'
+       if mpi_name == "openmpi":
+           Rmpi_type = "OPENMPI"
+       elif mpi_name == "mpich":
+           Rmpi_type = "MPICH2"
        else:
-           raise InstallError('Unsupported MPI type')
+           raise InstallError("Unsupported MPI type")
 
        return [
-           '--with-Rmpi-type={0}'.format(Rmpi_type),
-           '--with-mpi={0}'.format(spec['mpi'].prefix),
+           "--with-Rmpi-type={0}".format(Rmpi_type),
+           "--with-mpi={0}".format(spec["mpi"].prefix),
        ]
```
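To see how the ``cran`` shorthand and the dependency rules above combine, here is a minimal sketch of a complete ``RPackage``; the CRAN package name and its dependencies are invented for illustration:

```python
# Hypothetical CRAN package; "example" and its dependencies are invented.
from spack.package import *


class RExample(RPackage):
    """Example CRAN package (illustrative sketch)."""

    # Expands to homepage, url and list_url on cloud.r-project.org.
    cran = "example"

    # Minimum R version taken from the DESCRIPTION file's Depends field.
    depends_on("r@3.2.0:", type=("build", "run"))
    # Depends/Imports/LinkingTo entries all map to type=("build", "run").
    depends_on("r-lattice@0.20:", type=("build", "run"))
```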
**Docs: Ruby build system**

```diff
@@ -84,8 +84,8 @@ The ``*.gemspec`` file may contain something like:
 
 .. code-block:: ruby
 
-   summary = 'An implementation of the AsciiDoc text processor and publishing toolchain'
-   description = 'A fast, open source text processor and publishing toolchain for converting AsciiDoc content to HTML 5, DocBook 5, and other formats.'
+   summary = "An implementation of the AsciiDoc text processor and publishing toolchain"
+   description = "A fast, open source text processor and publishing toolchain for converting AsciiDoc content to HTML 5, DocBook 5, and other formats."
 
 
 Either of these can be used for the description of the Spack package.
@@ -98,7 +98,7 @@ The ``*.gemspec`` file may contain something like:
 
 .. code-block:: ruby
 
-   homepage = 'https://asciidoctor.org'
+   homepage = "https://asciidoctor.org"
 
 
 This should be used as the official homepage of the Spack package.
@@ -112,21 +112,21 @@ the base class contains:
 
 .. code-block:: python
 
-   extends('ruby')
+   extends("ruby")
 
 
 The ``*.gemspec`` file may contain something like:
 
 .. code-block:: ruby
 
-   required_ruby_version = '>= 2.3.0'
+   required_ruby_version = ">= 2.3.0"
 
 
 This can be added to the Spack package using:
 
 .. code-block:: python
 
-   depends_on('ruby@2.3.0:', type=('build', 'run'))
+   depends_on("ruby@2.3.0:", type=("build", "run"))
 
 
 ^^^^^^^^^^^^^^^^^
```
**Docs: SIP build system**

```diff
@@ -124,7 +124,7 @@ are wrong, you can provide the names yourself by overriding
 
 .. code-block:: python
 
-   import_modules = ['PyQt5']
+   import_modules = ["PyQt5"]
 
 
 These tests often catch missing dependencies and non-RPATHed
```
```diff
@@ -63,8 +63,8 @@ run package-specific unit tests.
 .. code-block:: python
 
    def installtest(self):
-       with working_dir('test'):
-           pytest = which('py.test')
+       with working_dir("test"):
+           pytest = which("py.test")
            pytest()
```
**Docs: Waf build system**

```diff
@@ -93,7 +93,7 @@ the following dependency automatically:
 
 .. code-block:: python
 
-   depends_on('python@2.5:', type='build')
+   depends_on("python@2.5:", type="build")
 
 
 Waf only supports Python 2.5 and up.
@@ -113,7 +113,7 @@ phase, you can use:
        args = []
 
        if self.run_tests:
-           args.append('--test')
+           args.append("--test")
 
        return args
```
**Docs: Container Images**

```diff
@@ -9,34 +9,96 @@
 Container Images
 ================
 
-Spack :ref:`environments` are a great tool to create container images, but
-preparing one that is suitable for production requires some more boilerplate
-than just:
+Spack :ref:`environments` can easily be turned into container images. This page
+outlines two ways in which this can be done:
+
+1. By installing the environment on the host system, and copying the installations
+   into the container image. This approach does not require any tools like Docker
+   or Singularity to be installed.
+2. By generating a Docker or Singularity recipe that can be used to build the
+   container image. In this approach, Spack builds the software inside the
+   container runtime, not on the host system.
+
+The first approach is easiest if you already have an installed environment,
+the second approach gives more control over the container image.
+
+---------------------------
+From existing installations
+---------------------------
+
+If you already have a Spack environment installed on your system, you can
+share the binaries as an OCI compatible container image. To get started you
+just have to configure an OCI registry and run ``spack buildcache push``.
+
+.. code-block:: console
+
+   # Create and install an environment in the current directory
+   spack env create -d .
+   spack -e . add pkg-a pkg-b
+   spack -e . install
+
+   # Configure the registry
+   spack -e . mirror add --oci-username ... --oci-password ... container-registry oci://example.com/name/image
+
+   # Push the image
+   spack -e . buildcache push --update-index --base-image ubuntu:22.04 --tag my_env container-registry
+
+The resulting container image can then be run as follows:
+
+.. code-block:: console
+
+   $ docker run -it example.com/name/image:my_env
+
+The image generated by Spack consists of the specified base image with each package from the
+environment as a separate layer on top. The image is minimal by construction, it only contains the
+environment roots and its runtime dependencies.
+
+.. note::
+
+   When using registries like GHCR and Docker Hub, the ``--oci-password`` flag is not
+   the password for your account, but a personal access token you need to generate separately.
+
+The specified ``--base-image`` should have a libc that is compatible with the host system.
+For example if your host system is Ubuntu 20.04, you can use ``ubuntu:20.04``, ``ubuntu:22.04``
+or newer: the libc in the container image must be at least the version of the host system,
+assuming ABI compatibility. It is also perfectly fine to use a completely different
+Linux distribution as long as the libc is compatible.
+
+For convenience, Spack also turns the OCI registry into a :ref:`build cache <binary_caches_oci>`,
+so that future ``spack install`` of the environment will simply pull the binaries from the
+registry instead of doing source builds. The flag ``--update-index`` is needed to make Spack
+take the build cache into account when concretizing.
+
+.. note::
+
+   When generating container images in CI, the approach above is recommended when CI jobs
+   already run in a sandboxed environment. You can simply use ``spack`` directly
+   in the CI job and push the resulting image to a registry. Subsequent CI jobs should
+   run faster because Spack can install from the same registry instead of rebuilding from
+   sources.
+
+---------------------------------------------
+Generating recipes for Docker and Singularity
+---------------------------------------------
+
+Apart from copying existing installations into container images, Spack can also
+generate recipes for container images. This is useful if you want to run Spack
+itself in a sandboxed environment instead of on the host system.
+
+Since recipes need a little bit more boilerplate than
 
 .. code-block:: docker
 
    COPY spack.yaml /environment
    RUN spack -e /environment install
 
-Additional actions may be needed to minimize the size of the
-container, or to update the system software that is installed in the base
-image, or to set up a proper entrypoint to run the image. These tasks are
-usually both necessary and repetitive, so Spack comes with a command
-to generate recipes for container images starting from a ``spack.yaml``.
+Spack provides a command to generate customizable recipes for container images. Customizations
+include minimizing the size of the image, installing packages in the base image using the system
+package manager, and setting up a proper entrypoint to run the image.
 
-.. seealso::
-
-   This page is a reference for generating recipes to build container images.
-   It means that your environment is built from scratch inside the container
-   runtime.
-
-   Since v0.21, Spack can also create container images from existing package installations
-   on your host system. See :ref:`binary_caches_oci` for more information on
-   that topic.
-
---------------------
+~~~~~~~~~~~~~~~~~~~~
 A Quick Introduction
---------------------
+~~~~~~~~~~~~~~~~~~~~
 
 Consider having a Spack environment like the following:
@@ -47,8 +109,8 @@ Consider having a Spack environment like the following:
     - gromacs+mpi
     - mpich
 
-Producing a ``Dockerfile`` from it is as simple as moving to the directory
-where the ``spack.yaml`` file is stored and giving the following command:
+Producing a ``Dockerfile`` from it is as simple as changing directories to
+where the ``spack.yaml`` file is stored and running the following command:
 
 .. code-block:: console
@@ -114,9 +176,9 @@ configuration are discussed in details in the sections below.
 
 .. _container_spack_images:
 
---------------------------
+~~~~~~~~~~~~~~~~~~~~~~~~~~
 Spack Images on Docker Hub
---------------------------
+~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 Docker images with Spack preinstalled and ready to be used are
 built when a release is tagged, or nightly on ``develop``. The images
@@ -186,9 +248,9 @@ by Spack use them as default base images for their ``build`` stage,
 even though handles to use custom base images provided by users are
 available to accommodate complex use cases.
 
----------------------------------
-Creating Images From Environments
----------------------------------
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Configuring the Container Recipe
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 Any Spack Environment can be used for the automatic generation of container
 recipes. Sensible defaults are provided for things like the base image or the
@@ -229,18 +291,18 @@ under the ``container`` attribute of environments:
 
 A detailed description of the options available can be found in the :ref:`container_config_options` section.
 
--------------------
+~~~~~~~~~~~~~~~~~~~
 Setting Base Images
--------------------
+~~~~~~~~~~~~~~~~~~~
 
 The ``images`` subsection is used to select both the image where
 Spack builds the software and the image where the built software
 is installed. This attribute can be set in different ways and
 which one to use depends on the use case at hand.
 
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+""""""""""""""""""""""""""""""""""""""""
 Use Official Spack Images From Dockerhub
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+""""""""""""""""""""""""""""""""""""""""
 
 To generate a recipe that uses an official Docker image from the
 Spack organization to build the software and the corresponding official OS image
@@ -445,9 +507,9 @@ responsibility to ensure that:
 Therefore we don't recommend its use in cases that can be otherwise
 covered by the simplified mode shown first.
 
-----------------------------
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 Singularity Definition Files
-----------------------------
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 In addition to producing recipes in ``Dockerfile`` format Spack can produce
 Singularity Definition Files by just changing the value of the ``format``
@@ -468,9 +530,9 @@ attribute:
 The minimum version of Singularity required to build a SIF (Singularity Image Format)
 image from the recipes generated by Spack is ``3.5.3``.
 
------------------------------
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 Extending the Jinja2 Templates
-------------------------------
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 The Dockerfile and the Singularity definition file that Spack can generate are based on
 a few Jinja2 templates that are rendered according to the environment being containerized.
@@ -591,9 +653,9 @@ The recipe that gets generated contains the two extra instruction that we added
 
 .. _container_config_options:
 
------------------------
+~~~~~~~~~~~~~~~~~~~~~~~
 Configuration Reference
------------------------
+~~~~~~~~~~~~~~~~~~~~~~~
 
 The tables below describe all the configuration options that are currently supported
 to customize the generation of container recipes:
@@ -690,13 +752,13 @@ to customize the generation of container recipes:
      - Description string
      - No
 
---------------
+~~~~~~~~~~~~~~
 Best Practices
---------------
+~~~~~~~~~~~~~~
 
-^^^
+"""
 MPI
-^^^
+"""
 Due to the dependency on Fortran for OpenMPI, which is the spack default
 implementation, consider adding ``gfortran`` to the ``apt-get install`` list.
 
@@ -707,9 +769,9 @@ For execution on HPC clusters, it can be helpful to import the docker
 image into Singularity in order to start a program with an *external*
 MPI. Otherwise, also add ``openssh-server`` to the ``apt-get install`` list.
 
-^^^^
+""""
 CUDA
-^^^^
+""""
 Starting from CUDA 9.0, Nvidia provides minimal CUDA images based on
 Ubuntu. Please see `their instructions <https://hub.docker.com/r/nvidia/cuda/>`_.
 Avoid double-installing CUDA by adding, e.g.
 
@@ -728,9 +790,9 @@ to your ``spack.yaml``.
 Users will either need ``nvidia-docker`` or e.g. Singularity to *execute*
 device kernels.
 
-^^^^^^^^^^^^^^^^^^^^^^^^^
+"""""""""""""""""""""""""
 Docker on Windows and OSX
-^^^^^^^^^^^^^^^^^^^^^^^^^
+"""""""""""""""""""""""""
 
 On Mac OS and Windows, docker runs on a hypervisor that is not allocated much
 memory by default, and some spack packages may fail to build due to lack of
```
@@ -9,46 +9,42 @@
|
||||
Custom Extensions
|
||||
=================
|
||||
|
||||
*Spack extensions* permit you to extend Spack capabilities by deploying your
|
||||
*Spack extensions* allow you to extend Spack capabilities by deploying your
|
||||
own custom commands or logic in an arbitrary location on your filesystem.
|
||||
This might be extremely useful e.g. to develop and maintain a command whose purpose is
|
||||
too specific to be considered for reintegration into the mainline or to
|
||||
evolve a command through its early stages before starting a discussion to merge
|
||||
it upstream.
|
||||
|
||||
From Spack's point of view an extension is any path in your filesystem which
|
||||
respects a prescribed naming and layout for files:
|
||||
respects the following naming and layout for files:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
spack-scripting/ # The top level directory must match the format 'spack-{extension_name}'
|
||||
├── pytest.ini # Optional file if the extension ships its own tests
|
||||
├── scripting # Folder that may contain modules that are needed for the extension commands
|
||||
│ └── cmd # Folder containing extension commands
|
||||
│ └── filter.py # A new command that will be available
|
||||
├── tests # Tests for this extension
|
||||
│ ├── cmd # Folder containing extension commands
|
||||
│ │ └── filter.py # A new command that will be available
|
||||
│ └── functions.py # Module with internal details
|
||||
└── tests # Tests for this extension
|
||||
│ ├── conftest.py
|
||||
│ └── test_filter.py
|
||||
└── templates # Templates that may be needed by the extension
|
||||
|
||||
In the example above the extension named *scripting* adds an additional command (``filter``)
|
||||
and unit tests to verify its behavior. The code for this example can be
|
||||
obtained by cloning the corresponding git repository:
|
||||
In the example above, the extension is named *scripting*. It adds an additional command
|
||||
(``spack filter``) and unit tests to verify its behavior.
|
||||
|
||||
.. TODO: write an ad-hoc "hello world" extension and make it part of the spack organization
|
||||
The extension can import any core Spack module in its implementation. When loaded by
|
||||
the ``spack`` command, the extension itself is imported as a Python package in the
|
||||
``spack.extensions`` namespace. In the example above, since the extension is named
|
||||
"scripting", the corresponding Python module is ``spack.extensions.scripting``.
|
||||
|
||||
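For orientation, a command module such as ``scripting/cmd/filter.py`` follows the same
conventions as Spack's built-in commands. The sketch below is illustrative only and is
not the actual content of the repository:

.. code-block:: python

   # scripting/cmd/filter.py -- minimal illustrative sketch
   description = "filter a list of specs"
   section = "scripting"
   level = "long"


   def setup_parser(subparser):
       subparser.add_argument(
           "--installed", action="store_true", help="select installed specs"
       )


   def filter(parser, args):
       # The function named after the command implements ``spack filter``
       ...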
The code for this example extension can be obtained by cloning the corresponding git repository:

.. code-block:: console

   $ cd ~/
   $ mkdir tmp && cd tmp
   $ git clone https://github.com/alalazo/spack-scripting.git
   Cloning into 'spack-scripting'...
   remote: Counting objects: 11, done.
   remote: Compressing objects: 100% (7/7), done.
   remote: Total 11 (delta 0), reused 11 (delta 0), pack-reused 0
   Receiving objects: 100% (11/11), done.

As you can see by inspecting the sources, Python modules that are part of the extension
can import any core Spack module.

   $ git -C /tmp clone https://github.com/spack/spack-scripting.git

---------------------------------
Configure Spack to Use Extensions
@@ -61,7 +57,7 @@ paths to ``config.yaml``. In the case of our example this means ensuring that:

   config:
     extensions:
     - ~/tmp/spack-scripting
     - /tmp/spack-scripting

is part of your configuration file. Once this is setup any command that the extension provides
will be available from the command line:
@@ -86,37 +82,32 @@ will be available from the command line:
     --implicit select specs that are not installed or were installed implicitly
     --output OUTPUT where to dump the result

The corresponding unit tests can be run giving the appropriate options
to ``spack unit-test``:
The corresponding unit tests can be run giving the appropriate options to ``spack unit-test``:

.. code-block:: console

   $ spack unit-test --extension=scripting

   ============================================================== test session starts ===============================================================
   platform linux2 -- Python 2.7.15rc1, pytest-3.2.5, py-1.4.34, pluggy-0.4.0
   rootdir: /home/mculpo/tmp/spack-scripting, inifile: pytest.ini
   ========================================== test session starts ===========================================
   platform linux -- Python 3.11.5, pytest-7.4.3, pluggy-1.3.0
   rootdir: /home/culpo/github/spack-scripting
   configfile: pytest.ini
   testpaths: tests
   plugins: xdist-3.5.0
   collected 5 items

   tests/test_filter.py ...XX
   ============================================================ short test summary info =============================================================
   XPASS tests/test_filter.py::test_filtering_specs[flags3-specs3-expected3]
   XPASS tests/test_filter.py::test_filtering_specs[flags4-specs4-expected4]
   tests/test_filter.py ..... [100%]

   =========================================================== slowest 20 test durations ============================================================
   3.74s setup tests/test_filter.py::test_filtering_specs[flags0-specs0-expected0]
   0.17s call tests/test_filter.py::test_filtering_specs[flags3-specs3-expected3]
   0.16s call tests/test_filter.py::test_filtering_specs[flags2-specs2-expected2]
   0.15s call tests/test_filter.py::test_filtering_specs[flags1-specs1-expected1]
   0.13s call tests/test_filter.py::test_filtering_specs[flags4-specs4-expected4]
   0.08s call tests/test_filter.py::test_filtering_specs[flags0-specs0-expected0]
   0.04s teardown tests/test_filter.py::test_filtering_specs[flags4-specs4-expected4]
   0.00s setup tests/test_filter.py::test_filtering_specs[flags4-specs4-expected4]
   0.00s setup tests/test_filter.py::test_filtering_specs[flags3-specs3-expected3]
   0.00s setup tests/test_filter.py::test_filtering_specs[flags1-specs1-expected1]
   0.00s setup tests/test_filter.py::test_filtering_specs[flags2-specs2-expected2]
   0.00s teardown tests/test_filter.py::test_filtering_specs[flags2-specs2-expected2]
   0.00s teardown tests/test_filter.py::test_filtering_specs[flags1-specs1-expected1]
   0.00s teardown tests/test_filter.py::test_filtering_specs[flags0-specs0-expected0]
   0.00s teardown tests/test_filter.py::test_filtering_specs[flags3-specs3-expected3]
   ====================================================== 3 passed, 2 xpassed in 4.51 seconds =======================================================
   ========================================== slowest 30 durations ==========================================
   2.31s setup tests/test_filter.py::test_filtering_specs[kwargs0-specs0-expected0]
   0.57s call tests/test_filter.py::test_filtering_specs[kwargs2-specs2-expected2]
   0.56s call tests/test_filter.py::test_filtering_specs[kwargs4-specs4-expected4]
   0.54s call tests/test_filter.py::test_filtering_specs[kwargs3-specs3-expected3]
   0.54s call tests/test_filter.py::test_filtering_specs[kwargs1-specs1-expected1]
   0.48s call tests/test_filter.py::test_filtering_specs[kwargs0-specs0-expected0]
   0.01s setup tests/test_filter.py::test_filtering_specs[kwargs4-specs4-expected4]
   0.01s setup tests/test_filter.py::test_filtering_specs[kwargs2-specs2-expected2]
   0.01s setup tests/test_filter.py::test_filtering_specs[kwargs1-specs1-expected1]
   0.01s setup tests/test_filter.py::test_filtering_specs[kwargs3-specs3-expected3]

   (5 durations < 0.005s hidden. Use -vv to show these durations.)
   =========================================== 5 passed in 5.06s ============================================

@@ -250,10 +250,9 @@ Compiler configuration

Spack has the ability to build packages with multiple compilers and
compiler versions. Compilers can be made available to Spack by
specifying them manually in ``compilers.yaml`` or ``packages.yaml``,
or automatically by running ``spack compiler find``, but for
convenience Spack will automatically detect compilers the first time
it needs them.
specifying them manually in ``compilers.yaml``, or automatically by
running ``spack compiler find``, but for convenience Spack will
automatically detect compilers the first time it needs them.

.. _cmd-spack-compilers:

@@ -458,52 +457,6 @@ specification. The operations available to modify the environment are ``set``, `
      prepend_path: # Similar for append|remove_path
        LD_LIBRARY_PATH: /ld/paths/added/by/setvars/sh

.. note::

   Spack is in the process of moving compilers from a separate
   attribute to be handled like all other packages. As part of this
   process, the ``compilers.yaml`` section will eventually be replaced
   by configuration in the ``packages.yaml`` section. This new
   configuration is now available, although it is not yet the default
   behavior.

Compilers can also be configured as external packages in the
``packages.yaml`` config file. Any external package for a compiler
(e.g. ``gcc`` or ``llvm``) will be treated as a configured compiler
assuming the paths to the compiler executables are determinable from
the prefix.

If the paths to the compiler executable are not determinable from the
prefix, you can add them to the ``extra_attributes`` field using the
``compilers`` key. The ``compilers`` key accepts compilers for ``c``,
``cxx``, ``fortran``, and ``f77``.

For all other fields from the ``compilers`` config, they can be added
to the ``extra_attributes`` field for an external representing a
compiler. These fields are used as-is in the internal representation
of the compiler config.

.. code-block:: yaml

   packages:
     gcc:
       external:
       - spec: gcc@12.2.0 arch=linux-rhel8-skylake
         prefix: /usr
         extra_attributes:
           environment:
             set:
               GCC_ROOT: /usr
     llvm:
       external:
       - spec: llvm+clang@15.0.0 arch=linux-rhel8-skylake
         prefix: /usr
         extra_attributes:
           compilers:
             c: /usr/bin/clang-with-suffix
             cxx: /usr/bin/clang++-with-extra-info
             fortran: /usr/bin/gfortran
           extra_rpaths:
           - /usr/lib/llvm/

^^^^^^^^^^^^^^^^^^^^^^^
Build Your Own Compiler

@@ -111,3 +111,28 @@ CUDA is split into fewer components and is simpler to specify:
      prefix: /opt/cuda/cuda-11.0.2/

where ``/opt/cuda/cuda-11.0.2/lib/`` contains ``libcudart.so``.


-----------------------------------
Using an External OpenGL API
-----------------------------------
Depending on whether we have a graphics card or not, we may choose to use OSMesa or GLX to implement the OpenGL API.

If a graphics card is unavailable, OSMesa is recommended and can typically be built with Spack.
However, if we prefer to utilize the system GLX tailored to our graphics card, we need to declare it as an external. Here's how to do it:

.. code-block:: yaml

   packages:
     libglx:
       require: [opengl]
     opengl:
       buildable: false
       externals:
       - prefix: /usr/
         spec: opengl@4.6

Note that the prefix has to be the root of both the libraries and the headers (i.e. ``/usr``),
not the path to the ``lib`` directory.
To find out which ``opengl`` spec is available, use ``cd /usr/include/GL && grep -Ri gl_version``.

@@ -97,6 +97,35 @@ Each package version and compiler listed in an external should
have entries in Spack's packages and compiler configuration, even
though the package and compiler may not ever be built.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Extra attributes for external packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Sometimes external packages require additional attributes to be used
effectively. This information can be defined on a per-package basis
and stored in the ``extra_attributes`` section of the external package
configuration. In addition to per-package information, this section
can be used to define environment modifications to be performed
whenever the package is used. For example, if an external package is
built without ``rpath`` support, it may require ``LD_LIBRARY_PATH``
settings to find its dependencies. This could be configured as
follows:

.. code-block:: yaml

   packages:
     mpich:
       externals:
       - spec: mpich@3.3 %clang@12.0.0 +hwloc
         prefix: /path/to/mpich
         extra_attributes:
           environment:
             prepend_path:
               LD_LIBRARY_PATH: /path/to/hwloc/lib64

See :ref:`configuration_environment_variables` for more information on
how to configure environment modifications in Spack config files.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Prevent packages from being built from sources
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

@@ -5284,7 +5284,7 @@ installed example.
   example = which(self.prefix.bin.example)
   example()

Output showing the identification of each test part after runnig the tests
Output showing the identification of each test part after running the tests
is illustrated below.

.. code-block:: console
@@ -5781,7 +5781,7 @@ with those implemented in the package itself.
   * - `Cxx
       <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cxx>`_
     - Compiles and runs several ``hello`` programs
   * - `Fortan
   * - `Fortran
       <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/fortran>`_
     - Compiles and runs ``hello`` programs (``F`` and ``f90``)
   * - `Mpi

@@ -1,13 +1,13 @@
sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.5.0
sphinx-rtd-theme==1.3.0
sphinx-rtd-theme==2.0.0
python-levenshtein==0.23.0
docutils==0.18.1
pygments==2.17.1
docutils==0.20.1
pygments==2.17.2
urllib3==2.1.0
pytest==7.4.3
isort==5.12.0
black==23.11.0
isort==5.13.2
black==23.12.1
flake8==6.1.0
mypy==1.7.0
mypy==1.8.0

@@ -142,7 +142,7 @@ Reputational Key
----------------

The Reputational Key is the public facing key used to sign complete groups of
development and release packages. Only one key pair exsits in this class of
development and release packages. Only one key pair exists in this class of
keys. In contrast to the Intermediate CI Key the Reputational Key *should* be
used to verify package integrity. At the end of develop and release pipeline a
final pipeline job pulls down all signed package metadata built by the pipeline,
@@ -272,7 +272,7 @@ Internal Implementation

The technical implementation of the pipeline signing process includes components
defined in Amazon Web Services, the Kubernetes cluster, at affilicated
institutions, and the GitLab/GitLab Runner deployment. We present the techincal
institutions, and the GitLab/GitLab Runner deployment. We present the technical
implementation in two interdependent sections. The first addresses how secrets
are managed through the lifecycle of a develop or release pipeline. The second
section describes how Gitlab Runner and pipelines are configured and managed to
@@ -295,7 +295,7 @@ infrastructure.
-----------------------

Multiple intermediate CI signing keys exist, one Intermediate CI Key for jobs
run in AWS, and one key for each affiliated institution (e.g. Univerity of
run in AWS, and one key for each affiliated institution (e.g. University of
Oregon). Here we describe how the Intermediate CI Key is managed in AWS:

The Intermediate CI Key (including the Signing Intermediate CI Private Key) is
@@ -305,7 +305,7 @@ contains an ASCII-armored export of just the *public* components of the
Reputational Key. This secret also contains the *public* components of each of
the affiliated institutions' Intermediate CI Key. These are potentially needed
to verify dependent packages which may have been found in the public mirror or
built by a protected job running on an affiliated institution's infrastrcuture
built by a protected job running on an affiliated institution's infrastructure
in an earlier stage of the pipeline.

Procedurally the ``spack-intermediate-ci-signing-key`` secret is used in

@@ -1047,9 +1047,9 @@ def __bool__(self):
        """Whether any exceptions were handled."""
        return bool(self.exceptions)

    def forward(self, context: str) -> "GroupedExceptionForwarder":
    def forward(self, context: str, base: type = BaseException) -> "GroupedExceptionForwarder":
        """Return a contextmanager which extracts tracebacks and prefixes a message."""
        return GroupedExceptionForwarder(context, self)
        return GroupedExceptionForwarder(context, self, base)

    def _receive_forwarded(self, context: str, exc: Exception, tb: List[str]):
        self.exceptions.append((context, exc, tb))
@@ -1072,15 +1072,18 @@ class GroupedExceptionForwarder:
    """A contextmanager to capture exceptions and forward them to a
    GroupedExceptionHandler."""

    def __init__(self, context: str, handler: GroupedExceptionHandler):
    def __init__(self, context: str, handler: GroupedExceptionHandler, base: type):
        self._context = context
        self._handler = handler
        self._base = base

    def __enter__(self):
        return None

    def __exit__(self, exc_type, exc_value, tb):
        if exc_value is not None:
            if not issubclass(exc_type, self._base):
                return False
            self._handler._receive_forwarded(self._context, exc_value, traceback.format_tb(tb))

        # Suppress any exception from being re-raised:
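A minimal usage sketch of the extended API (the ``attempt()`` helper and the source
names are hypothetical; ``grouped_message()`` is assumed from the same handler class):

.. code-block:: python

   handler = GroupedExceptionHandler()

   for name in ("github-actions", "spack-install"):
       # Only exceptions deriving from RuntimeError are captured and grouped;
       # anything else propagates, because __exit__ returns False for it.
       with handler.forward(name, RuntimeError):
           attempt(name)  # hypothetical callable that may raise

   if handler:  # True when at least one exception was forwarded
       raise RuntimeError(handler.grouped_message())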
@@ -726,13 +726,46 @@ def _unknown_variants_in_directives(pkgs, error_cls):


@package_directives
def _unknown_variants_in_dependencies(pkgs, error_cls):
    """Report unknown dependencies and wrong variants for dependencies"""
def _issues_in_depends_on_directive(pkgs, error_cls):
    """Reports issues with 'depends_on' directives.

    Issues might be unknown dependencies, unknown variants or variant values, or declaration
    of nested dependencies.
    """
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
        for dependency_name, dependency_data in pkg_cls.dependencies.items():
            # Check if there are nested dependencies declared. We don't want directives like:
            #
            #     depends_on('foo+bar ^fee+baz')
            #
            # but we'd like to have two dependencies listed instead.
            for when, dependency_edge in dependency_data.items():
                dependency_spec = dependency_edge.spec
                nested_dependencies = dependency_spec.dependencies()
                if nested_dependencies:
                    summary = (
                        f"{pkg_name}: invalid nested dependency "
                        f"declaration '{str(dependency_spec)}'"
                    )
                    details = [
                        f"split depends_on('{str(dependency_spec)}', when='{str(when)}') "
                        f"into {len(nested_dependencies) + 1} directives",
                        f"in {filename}",
                    ]
                    errors.append(error_cls(summary=summary, details=details))

                for s in (dependency_spec, when):
                    if s.virtual and s.variants:
                        summary = f"{pkg_name}: virtual dependency cannot have variants"
                        details = [
                            f"remove variants from '{str(s)}' in depends_on directive",
                            f"in {filename}",
                        ]
                        errors.append(error_cls(summary=summary, details=details))

            # No need to analyze virtual packages
            if spack.repo.PATH.is_virtual(dependency_name):
                continue
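To illustrate what this audit flags, a package declaring a nested dependency in a
single directive should be split into one directive per dependency edge (package
names are made up):

.. code-block:: python

   # Flagged: a nested dependency inside one depends_on directive
   depends_on("foo+bar ^fee+baz")

   # Preferred: one directive per dependency
   depends_on("foo+bar")
   depends_on("fee+baz")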
@@ -25,7 +25,7 @@
import warnings
from contextlib import closing, contextmanager
from gzip import GzipFile
from typing import Dict, List, NamedTuple, Optional, Set, Tuple
from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple
from urllib.error import HTTPError, URLError

import llnl.util.filesystem as fsys
@@ -230,7 +230,11 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
            )
            return

        spec_list = db.query_local(installed=False, in_buildcache=True)
        spec_list = [
            s
            for s in db.query_local(installed=any, in_buildcache=any)
            if s.external or db.query_local_by_spec_hash(s.dag_hash()).in_buildcache
        ]

        for indexed_spec in spec_list:
            dag_hash = indexed_spec.dag_hash()
@@ -1601,14 +1605,14 @@ def _get_valid_spec_file(path: str, max_supported_layout: int) -> Tuple[Dict, in
    return spec_dict, layout_version


def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=None):
    """
    Download binary tarball for given package into stage area, returning
    path to downloaded tarball if successful, None otherwise.

    Args:
        spec (spack.spec.Spec): Concrete spec
        unsigned (bool): Whether or not to require signed binaries
        unsigned: if ``True`` or ``False`` override the mirror signature verification defaults
        mirrors_for_spec (list): Optional list of concrete specs and mirrors
            obtained by calling binary_distribution.get_mirrors_for_spec().
            These will be checked in order first before looking in other
@@ -1629,7 +1633,9 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
            "signature_verified": "true-if-binary-pkg-was-already-verified"
        }
    """
    configured_mirrors = spack.mirror.MirrorCollection(binary=True).values()
    configured_mirrors: Iterable[spack.mirror.Mirror] = spack.mirror.MirrorCollection(
        binary=True
    ).values()
    if not configured_mirrors:
        tty.die("Please add a spack mirror to allow download of pre-compiled packages.")

@@ -1647,8 +1653,16 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
    # mirror for the spec twice though.
    try_first = [i["mirror_url"] for i in mirrors_for_spec] if mirrors_for_spec else []
    try_next = [i.fetch_url for i in configured_mirrors if i.fetch_url not in try_first]
    mirror_urls = try_first + try_next

    mirrors = try_first + try_next
    # TODO: turn `mirrors_for_spec` into a list of Mirror instances, instead of doing that here.
    def fetch_url_to_mirror(url):
        for mirror in configured_mirrors:
            if mirror.fetch_url == url:
                return mirror
        return spack.mirror.Mirror(url)

    mirrors = [fetch_url_to_mirror(url) for url in mirror_urls]

    tried_to_verify_sigs = []

@@ -1657,14 +1671,17 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
    # we remove support for deprecated spec formats and buildcache layouts.
    for try_signed in (True, False):
        for mirror in mirrors:
            # Override mirror's default if
            currently_unsigned = unsigned if unsigned is not None else not mirror.signed

            # If it's an OCI index, do things differently, since we cannot compose URLs.
            parsed = urllib.parse.urlparse(mirror)
            fetch_url = mirror.fetch_url

            # TODO: refactor this to some "nice" place.
            if parsed.scheme == "oci":
                ref = spack.oci.image.ImageReference.from_string(mirror[len("oci://") :]).with_tag(
                    spack.oci.image.default_tag(spec)
                )
            if fetch_url.startswith("oci://"):
                ref = spack.oci.image.ImageReference.from_string(
                    fetch_url[len("oci://") :]
                ).with_tag(spack.oci.image.default_tag(spec))

                # Fetch the manifest
                try:
@@ -1701,7 +1718,7 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
                except InvalidMetadataFile as e:
                    tty.warn(
                        f"Ignoring binary package for {spec.name}/{spec.dag_hash()[:7]} "
                        f"from {mirror} due to invalid metadata file: {e}"
                        f"from {fetch_url} due to invalid metadata file: {e}"
                    )
                    local_specfile_stage.destroy()
                    continue
@@ -1723,13 +1740,16 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
                    "tarball_stage": tarball_stage,
                    "specfile_stage": local_specfile_stage,
                    "signature_verified": False,
                    "signature_required": not currently_unsigned,
                }

            else:
                ext = "json.sig" if try_signed else "json"
                specfile_path = url_util.join(mirror, BUILD_CACHE_RELATIVE_PATH, specfile_prefix)
                specfile_path = url_util.join(
                    fetch_url, BUILD_CACHE_RELATIVE_PATH, specfile_prefix
                )
                specfile_url = f"{specfile_path}.{ext}"
                spackfile_url = url_util.join(mirror, BUILD_CACHE_RELATIVE_PATH, tarball)
                spackfile_url = url_util.join(fetch_url, BUILD_CACHE_RELATIVE_PATH, tarball)
                local_specfile_stage = try_fetch(specfile_url)
                if local_specfile_stage:
                    local_specfile_path = local_specfile_stage.save_filename
@@ -1742,21 +1762,21 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
                    except InvalidMetadataFile as e:
                        tty.warn(
                            f"Ignoring binary package for {spec.name}/{spec.dag_hash()[:7]} "
                            f"from {mirror} due to invalid metadata file: {e}"
                            f"from {fetch_url} due to invalid metadata file: {e}"
                        )
                        local_specfile_stage.destroy()
                        continue

                    if try_signed and not unsigned:
                    if try_signed and not currently_unsigned:
                        # If we found a signed specfile at the root, try to verify
                        # the signature immediately. We will not download the
                        # tarball if we could not verify the signature.
                        tried_to_verify_sigs.append(specfile_url)
                        signature_verified = try_verify(local_specfile_path)
                        if not signature_verified:
                            tty.warn("Failed to verify: {0}".format(specfile_url))
                            tty.warn(f"Failed to verify: {specfile_url}")

                    if unsigned or signature_verified or not try_signed:
                    if currently_unsigned or signature_verified or not try_signed:
                        # We will download the tarball in one of three cases:
                        # 1. user asked for --no-check-signature
                        # 2. user didn't ask for --no-check-signature, but we
@@ -1779,6 +1799,7 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
                        "tarball_stage": tarball_stage,
                        "specfile_stage": local_specfile_stage,
                        "signature_verified": signature_verified,
                        "signature_required": not currently_unsigned,
                    }

                    local_specfile_stage.destroy()
@@ -1977,7 +1998,7 @@ def is_backup_file(file):
    relocate.relocate_text(text_names, prefix_to_prefix_text)


def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum):
def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool, remote_checksum):
    stagepath = os.path.dirname(filename)
    spackfile_name = tarball_name(spec, ".spack")
    spackfile_path = os.path.join(stagepath, spackfile_name)
@@ -1997,7 +2018,7 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
    else:
        raise ValueError("Cannot find spec file for {0}.".format(extract_to))

    if not unsigned:
    if signature_required:
        if os.path.exists("%s.asc" % specfile_path):
            suppress = config.get("config:suppress_gpg_warnings", False)
            try:
@@ -2046,7 +2067,7 @@ def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
            m.linkname = m.linkname[result.end() :]


def extract_tarball(spec, download_result, unsigned=False, force=False, timer=timer.NULL_TIMER):
def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
    """
    extract binary tarball for given package into install area
    """
@@ -2072,7 +2093,8 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti
    bchecksum = spec_dict["binary_cache_checksum"]

    filename = download_result["tarball_stage"].save_filename
    signature_verified = download_result["signature_verified"]
    signature_verified: bool = download_result["signature_verified"]
    signature_required: bool = download_result["signature_required"]
    tmpdir = None

    if layout_version == 0:
@@ -2081,7 +2103,9 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti
        # and another tarball containing the actual install tree.
        tmpdir = tempfile.mkdtemp()
        try:
            tarfile_path = _extract_inner_tarball(spec, filename, tmpdir, unsigned, bchecksum)
            tarfile_path = _extract_inner_tarball(
                spec, filename, tmpdir, signature_required, bchecksum
            )
        except Exception as e:
            _delete_staged_downloads(download_result)
            shutil.rmtree(tmpdir)
@@ -2094,9 +2118,10 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti
        # the tarball.
        tarfile_path = filename

    if not unsigned and not signature_verified:
    if signature_required and not signature_verified:
        raise UnsignedPackageException(
            "To install unsigned packages, use the --no-check-signature option."
            "To install unsigned packages, use the --no-check-signature option, "
            "or configure the mirror with signed: false."
        )
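The per-mirror default referenced by the new error message comes from the mirror
configuration. A sketch of such a ``mirrors.yaml`` entry (name and URL illustrative):

.. code-block:: yaml

   mirrors:
     my-mirror:
       url: https://example.com/buildcache
       signed: false  # binaries from this mirror are not expected to be signed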
    # compute the sha256 checksum of the tarball
@@ -2209,7 +2234,7 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
    # don't print long padded paths while extracting/relocating binaries
    with spack.util.path.filter_padding():
        tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
        extract_tarball(spec, download_result, unsigned, force)
        extract_tarball(spec, download_result, force)
        spack.hooks.post_install(spec, False)
        spack.store.STORE.db.add(spec, spack.store.STORE.layout)


@@ -16,6 +16,7 @@
import llnl.util.filesystem as fs
from llnl.util import tty

import spack.platforms
import spack.store
import spack.util.environment
import spack.util.executable
@@ -206,17 +207,19 @@ def _root_spec(spec_str: str) -> str:
    """Add a proper compiler and target to a spec used during bootstrapping.

    Args:
        spec_str (str): spec to be bootstrapped. Must be without compiler and target.
        spec_str: spec to be bootstrapped. Must be without compiler and target.
    """
    # Add a proper compiler hint to the root spec. We use GCC for
    # everything but MacOS and Windows.
    if str(spack.platforms.host()) == "darwin":
    # Add a compiler requirement to the root spec.
    platform = str(spack.platforms.host())
    if platform == "darwin":
        spec_str += " %apple-clang"
    elif str(spack.platforms.host()) == "windows":
    elif platform == "windows":
        # TODO (johnwparent): Remove version constraint when clingo patch is up
        spec_str += " %msvc@:19.37"
    else:
    elif platform == "linux":
        spec_str += " %gcc"
    elif platform == "freebsd":
        spec_str += " %clang"

    target = archspec.cpu.host().family
    spec_str += f" target={target}"
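Illustratively, the resulting root specs would look as follows (host-dependent;
the concrete values are assumed, not taken from this diff):

.. code-block:: python

   _root_spec("clingo-bootstrap@spack")
   # on a Linux x86_64 host:    "clingo-bootstrap@spack %gcc target=x86_64"
   # on an Apple Silicon host:  "clingo-bootstrap@spack %apple-clang target=aarch64"
   # on a FreeBSD host:         "clingo-bootstrap@spack %clang target=<cpu family>"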
@@ -147,7 +147,7 @@ def _add_compilers_if_missing() -> None:
        mixed_toolchain=sys.platform == "darwin"
    )
    if new_compilers:
        spack.compilers.add_compilers_to_config(new_compilers)
        spack.compilers.add_compilers_to_config(new_compilers, init_config=False)


@contextlib.contextmanager

@@ -386,7 +386,7 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
    exception_handler = GroupedExceptionHandler()

    for current_config in bootstrapping_sources():
        with exception_handler.forward(current_config["name"]):
        with exception_handler.forward(current_config["name"], Exception):
            source_is_enabled_or_raise(current_config)
            current_bootstrapper = create_bootstrapper(current_config)
            if current_bootstrapper.try_import(module, abstract_spec):
@@ -441,7 +441,7 @@ def ensure_executables_in_path_or_raise(
    exception_handler = GroupedExceptionHandler()

    for current_config in bootstrapping_sources():
        with exception_handler.forward(current_config["name"]):
        with exception_handler.forward(current_config["name"], Exception):
            source_is_enabled_or_raise(current_config)
            current_bootstrapper = create_bootstrapper(current_config)
            if current_bootstrapper.try_search_path(executables, abstract_spec):

@@ -19,7 +19,6 @@
import spack.tengine
import spack.util.cpus
import spack.util.executable
from spack.environment import depfile

from ._common import _root_spec
from .config import root_path, spec_for_current_python, store_path
@@ -86,12 +85,9 @@ def __init__(self) -> None:
        super().__init__(self.environment_root())

    def update_installations(self) -> None:
        """Update the installations of this environment.

        The update is done using a depfile on Linux and macOS, and using the ``install_all``
        method of environments on Windows.
        """
        with tty.SuppressOutput(msg_enabled=False, warn_enabled=False):
        """Update the installations of this environment."""
        log_enabled = tty.is_debug() or tty.is_verbose()
        with tty.SuppressOutput(msg_enabled=log_enabled, warn_enabled=log_enabled):
            specs = self.concretize()
        if specs:
            colorized_specs = [
@@ -100,11 +96,9 @@ def update_installations(self) -> None:
            ]
            tty.msg(f"[BOOTSTRAPPING] Installing dependencies ({', '.join(colorized_specs)})")
            self.write(regenerate=False)
            if sys.platform == "win32":
            with tty.SuppressOutput(msg_enabled=log_enabled, warn_enabled=log_enabled):
                self.install_all()
            else:
                self._install_with_depfile()
            self.write(regenerate=True)
            self.write(regenerate=True)

    def update_syspath_and_environ(self) -> None:
        """Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to
@@ -122,25 +116,6 @@ def update_syspath_and_environ(self) -> None:
            + [str(x) for x in self.pythonpaths()]
        )

    def _install_with_depfile(self) -> None:
        model = depfile.MakefileModel.from_env(self)
        template = spack.tengine.make_environment().get_template(
            os.path.join("depfile", "Makefile")
        )
        makefile = self.environment_root() / "Makefile"
        makefile.write_text(template.render(model.to_dict()))
        make = spack.util.executable.which("make")
        kwargs = {}
        if not tty.is_debug():
            kwargs = {"output": os.devnull, "error": os.devnull}
        make(
            "-C",
            str(self.environment_root()),
            "-j",
            str(spack.util.cpus.determine_number_of_jobs(parallel=True)),
            **kwargs,
        )

    def _write_spack_yaml_file(self) -> None:
        tty.msg(
            "[BOOTSTRAPPING] Spack has missing dependencies, creating a bootstrapping environment"

@@ -66,7 +66,6 @@ def _core_requirements() -> List[RequiredResponseType]:
_core_system_exes = {
    "make": _missing("make", "required to build software from sources"),
    "patch": _missing("patch", "required to patch source code before building"),
    "bash": _missing("bash", "required for Spack compiler wrapper"),
    "tar": _missing("tar", "required to manage code archives"),
    "gzip": _missing("gzip", "required to compress/decompress code archives"),
    "unzip": _missing("unzip", "required to compress/decompress code archives"),

@@ -1032,6 +1032,11 @@ def get_env_modifications(self) -> EnvironmentModifications:
                if id(spec) in self.nodes_in_subdag:
                    pkg.setup_dependent_run_environment(run_env_mods, spec)
            pkg.setup_run_environment(run_env_mods)

            external_env = (dspec.extra_attributes or {}).get("environment", {})
            if external_env:
                run_env_mods.extend(spack.schema.environment.parse(external_env))

            if self.context == Context.BUILD:
                # Don't let the runtime environment of compiler-like dependencies leak into the
                # build env

@@ -9,6 +9,7 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack.build_environment
import spack.builder

from .cmake import CMakeBuilder, CMakePackage
@@ -285,6 +286,19 @@ def initconfig_hardware_entries(self):
    def std_initconfig_entries(self):
        cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
        cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
        cmake_rpaths_env = spack.build_environment.get_rpaths(self.pkg)
        cmake_rpaths_path = ";".join(cmake_rpaths_env)
        complete_rpath_list = cmake_rpaths_path
        if "SPACK_COMPILER_EXTRA_RPATHS" in os.environ:
            spack_extra_rpaths_env = os.environ["SPACK_COMPILER_EXTRA_RPATHS"]
            spack_extra_rpaths_path = spack_extra_rpaths_env.replace(os.pathsep, ";")
            complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_extra_rpaths_path)

        if "SPACK_COMPILER_IMPLICIT_RPATHS" in os.environ:
            spack_implicit_rpaths_env = os.environ["SPACK_COMPILER_IMPLICIT_RPATHS"]
            spack_implicit_rpaths_path = spack_implicit_rpaths_env.replace(os.pathsep, ";")
            complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_implicit_rpaths_path)

        return [
            "#------------------{0}".format("-" * 60),
            "# !!!! This is a generated file, edit at own risk !!!!",
@@ -292,6 +306,9 @@ def std_initconfig_entries(self):
            "# CMake executable path: {0}".format(self.pkg.spec["cmake"].command.path),
            "#------------------{0}\n".format("-" * 60),
            cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path),
            cmake_cache_string("CMAKE_INSTALL_RPATH_USE_LINK_PATH", "ON"),
            cmake_cache_string("CMAKE_BUILD_RPATH", complete_rpath_list),
            cmake_cache_string("CMAKE_INSTALL_RPATH", complete_rpath_list),
            self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
        ]


@@ -10,13 +10,12 @@
import spack.builder
import spack.package_base
from spack.directives import build_system, extends
from spack.package_base import PackageBase
from spack.util.executable import Executable

from ._checks import BaseBuilder, execute_build_time_tests


class PerlPackage(PackageBase):
class PerlPackage(spack.package_base.PackageBase):
    """Specialized class for packages that are built using Perl."""

    #: This attribute is used in UI queries that need to know the build
@@ -61,6 +60,30 @@ class PerlBuilder(BaseBuilder):
    #: Callback names for build-time test
    build_time_test_callbacks = ["check"]

    @property
    def build_method(self):
        """Searches the package for either a Makefile.PL or Build.PL.

        Raises:
            RuntimeError: if neither Makefile.PL nor Build.PL exist
        """
        if os.path.isfile("Makefile.PL"):
            build_method = "Makefile.PL"
        elif os.path.isfile("Build.PL"):
            build_method = "Build.PL"
        else:
            raise RuntimeError("Unknown build_method for perl package")
        return build_method

    @property
    def build_executable(self):
        """Returns the executable method to build the perl package"""
        if self.build_method == "Makefile.PL":
            build_executable = inspect.getmodule(self.pkg).make
        elif self.build_method == "Build.PL":
            build_executable = Executable(os.path.join(self.pkg.stage.source_path, "Build"))
        return build_executable

    def configure_args(self):
        """List of arguments passed to :py:meth:`~.PerlBuilder.configure`.

@@ -73,19 +96,7 @@ def configure(self, pkg, spec, prefix):
        """Run Makefile.PL or Build.PL with arguments consisting of
        an appropriate installation base directory followed by the
        list returned by :py:meth:`~.PerlBuilder.configure_args`.

        Raises:
            RuntimeError: if neither Makefile.PL nor Build.PL exist
        """
        if os.path.isfile("Makefile.PL"):
            self.build_method = "Makefile.PL"
            self.build_executable = inspect.getmodule(self.pkg).make
        elif os.path.isfile("Build.PL"):
            self.build_method = "Build.PL"
            self.build_executable = Executable(os.path.join(self.pkg.stage.source_path, "Build"))
        else:
            raise RuntimeError("Unknown build_method for perl package")

        if self.build_method == "Makefile.PL":
            options = ["Makefile.PL", "INSTALL_BASE={0}".format(prefix)]
        elif self.build_method == "Build.PL":

@@ -6,13 +6,14 @@
import os
import re
import shutil
from typing import Optional
from typing import Iterable, List, Mapping, Optional

import archspec

import llnl.util.filesystem as fs
import llnl.util.lang as lang
import llnl.util.tty as tty
from llnl.util.filesystem import HeaderList, LibraryList

import spack.builder
import spack.config
@@ -25,14 +26,18 @@
from spack.directives import build_system, depends_on, extends, maintainers
from spack.error import NoHeadersError, NoLibrariesError
from spack.install_test import test_part
from spack.spec import Spec
from spack.util.prefix import Prefix

from ._checks import BaseBuilder, execute_install_time_tests


def _flatten_dict(dictionary):
def _flatten_dict(dictionary: Mapping[str, object]) -> Iterable[str]:
    """Iterable that yields KEY=VALUE paths through a dictionary.

    Args:
        dictionary: Possibly nested dictionary of arbitrary keys and values.

    Yields:
        A single path through the dictionary.
    """
@@ -50,7 +55,7 @@ class PythonExtension(spack.package_base.PackageBase):
    maintainers("adamjstewart")

    @property
    def import_modules(self):
    def import_modules(self) -> Iterable[str]:
        """Names of modules that the Python package provides.

        These are used to test whether or not the installation succeeded.
@@ -65,7 +70,7 @@ def import_modules(self):
        detected, this property can be overridden by the package.

        Returns:
            list: list of strings of module names
            List of strings of module names.
        """
        modules = []
        pkg = self.spec["python"].package
@@ -102,14 +107,14 @@ def import_modules(self):
        return modules

    @property
    def skip_modules(self):
    def skip_modules(self) -> Iterable[str]:
        """Names of modules that should be skipped when running tests.

        These are a subset of import_modules. If a module has submodules,
        they are skipped as well (meaning a.b is skipped if a is contained).

        Returns:
            list: list of strings of module names
            List of strings of module names.
        """
        return []

@@ -185,12 +190,12 @@ def remove_files_from_view(self, view, merge_map):

        view.remove_files(to_remove)

    def test_imports(self):
    def test_imports(self) -> None:
        """Attempts to import modules of the installed package."""

        # Make sure we are importing the installed modules,
        # not the ones in the source directory
        python = inspect.getmodule(self).python
        python = inspect.getmodule(self).python  # type: ignore[union-attr]
        for module in self.import_modules:
            with test_part(
                self,
@@ -315,24 +320,27 @@ class PythonPackage(PythonExtension):
    py_namespace: Optional[str] = None

    @lang.classproperty
    def homepage(cls):
    def homepage(cls) -> Optional[str]:  # type: ignore[override]
        if cls.pypi:
            name = cls.pypi.split("/")[0]
            return "https://pypi.org/project/" + name + "/"
            return f"https://pypi.org/project/{name}/"
        return None

    @lang.classproperty
    def url(cls):
    def url(cls) -> Optional[str]:
        if cls.pypi:
            return "https://files.pythonhosted.org/packages/source/" + cls.pypi[0] + "/" + cls.pypi
            return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
        return None

    @lang.classproperty
    def list_url(cls):
    def list_url(cls) -> Optional[str]:  # type: ignore[override]
        if cls.pypi:
            name = cls.pypi.split("/")[0]
            return "https://pypi.org/simple/" + name + "/"
            return f"https://pypi.org/simple/{name}/"
        return None

    @property
    def headers(self):
    def headers(self) -> HeaderList:
        """Discover header files in platlib."""

        # Remove py- prefix in package name
@@ -350,7 +358,7 @@ def headers(self):
        raise NoHeadersError(msg.format(self.spec.name, include, platlib))

    @property
    def libs(self):
    def libs(self) -> LibraryList:
        """Discover libraries in platlib."""

        # Remove py- prefix in package name
@@ -384,7 +392,7 @@ class PythonPipBuilder(BaseBuilder):
    install_time_test_callbacks = ["test"]

    @staticmethod
    def std_args(cls):
    def std_args(cls) -> List[str]:
        return [
            # Verbose
            "-vvv",
@@ -409,7 +417,7 @@ def std_args(cls):
        ]

    @property
    def build_directory(self):
    def build_directory(self) -> str:
        """The root directory of the Python package.

        This is usually the directory containing one of the following files:
@@ -420,51 +428,51 @@ def build_directory(self):
        """
        return self.pkg.stage.source_path

    def config_settings(self, spec, prefix):
    def config_settings(self, spec: Spec, prefix: Prefix) -> Mapping[str, object]:
        """Configuration settings to be passed to the PEP 517 build backend.

        Requires pip 22.1 or newer for keys that appear only a single time,
        or pip 23.1 or newer if the same key appears multiple times.

        Args:
            spec (spack.spec.Spec): build spec
            prefix (spack.util.prefix.Prefix): installation prefix
            spec: Build spec.
            prefix: Installation prefix.

        Returns:
            dict: Possibly nested dictionary of KEY, VALUE settings
            Possibly nested dictionary of KEY, VALUE settings.
        """
        return {}
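A package can override this hook to forward PEP 517 configuration settings to the
build backend. A sketch under the assumption that the backend understands the key
(package name and key are hypothetical):

.. code-block:: python

   class PyExample(PythonPackage):
       def config_settings(self, spec, prefix):
           # Forwarded by the pip builder as --config-settings KEY=VALUE
           return {"editable_mode": "compat"}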
    def install_options(self, spec, prefix):
    def install_options(self, spec: Spec, prefix: Prefix) -> Iterable[str]:
        """Extra arguments to be supplied to the setup.py install command.

        Requires pip 23.0 or older.

        Args:
            spec (spack.spec.Spec): build spec
            prefix (spack.util.prefix.Prefix): installation prefix
            spec: Build spec.
            prefix: Installation prefix.

        Returns:
            list: list of options
            List of options.
        """
        return []

    def global_options(self, spec, prefix):
    def global_options(self, spec: Spec, prefix: Prefix) -> Iterable[str]:
        """Extra global options to be supplied to the setup.py call before the install
        or bdist_wheel command.

        Deprecated in pip 23.1.

        Args:
            spec (spack.spec.Spec): build spec
            prefix (spack.util.prefix.Prefix): installation prefix
            spec: Build spec.
            prefix: Installation prefix.

        Returns:
            list: list of options
            List of options.
        """
        return []

    def install(self, pkg, spec, prefix):
    def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
        """Install everything from build directory."""

        args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"]

@@ -108,6 +108,8 @@ class ROCmPackage(PackageBase):
        "gfx90a:xnack+",
        "gfx90c",
        "gfx940",
        "gfx941",
        "gfx942",
        "gfx1010",
        "gfx1011",
        "gfx1012",
@@ -168,6 +170,8 @@ def hip_flags(amdgpu_target):
    depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a:xnack-")
    depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a:xnack+")
    depends_on("llvm-amdgpu@5.2.0:", when="amdgpu_target=gfx940")
    depends_on("llvm-amdgpu@5.7.0:", when="amdgpu_target=gfx941")
    depends_on("llvm-amdgpu@5.7.0:", when="amdgpu_target=gfx942")
    depends_on("llvm-amdgpu@4.5.0:", when="amdgpu_target=gfx1013")
    depends_on("llvm-amdgpu@3.8.0:", when="amdgpu_target=gfx1030")
    depends_on("llvm-amdgpu@3.9.0:", when="amdgpu_target=gfx1031")

@@ -1253,6 +1253,7 @@ def main_script_replacements(cmd):
        op=lambda cmd: cmd.replace("mirror_prefix", temp_storage_url_prefix),
    )

    cleanup_job["dependencies"] = []
    output_object["cleanup"] = cleanup_job

    if (
@@ -1276,6 +1277,7 @@ def main_script_replacements(cmd):
        if buildcache_destination
        else remote_mirror_override or remote_mirror_url
    )
    signing_job["dependencies"] = []

    output_object["sign-pkgs"] = signing_job

@@ -1296,6 +1298,7 @@ def main_script_replacements(cmd):
    final_job["when"] = "always"
    final_job["retry"] = service_job_retries
    final_job["interruptible"] = True
    final_job["dependencies"] = []

    output_object["rebuild-index"] = final_job


@@ -6,10 +6,8 @@
import argparse
import os
import re
import shlex
import sys
from textwrap import dedent
from typing import List, Match, Tuple
from typing import List, Union

import llnl.string
import llnl.util.tty as tty
@@ -147,89 +145,37 @@ def get_command(cmd_name):
    return getattr(get_module(cmd_name), pname)


class _UnquotedFlags:
    """Use a heuristic in `.extract()` to detect whether the user is trying to set
    multiple flags like the docker ENV attribute allows (e.g. 'cflags=-Os -pipe').
def quote_kvp(string: str) -> str:
    """For strings like ``name=value`` or ``name==value``, quote and escape the value if needed.

    If the heuristic finds a match (which can be checked with `__bool__()`), a warning
    message explaining how to quote multiple flags correctly can be generated with
    `.report()`.
    This is a compromise to respect quoting of key-value pairs on the CLI. The shell
    strips quotes from quoted arguments, so we cannot know *exactly* how CLI arguments
    were quoted. To compensate, we re-add quotes around anything starting with ``name=``
    or ``name==``, and we assume the rest of the argument is the value. This covers the
    common cases of passing flags, e.g., ``cflags="-O2 -g"`` on the command line.
"""
|
||||
match = spack.parser.SPLIT_KVP.match(string)
|
||||
if not match:
|
||||
return string
|
||||
|
||||
flags_arg_pattern = re.compile(
|
||||
r'^({0})=([^\'"].*)$'.format("|".join(spack.spec.FlagMap.valid_compiler_flags()))
|
||||
)
|
||||
|
||||
def __init__(self, all_unquoted_flag_pairs: List[Tuple[Match[str], str]]):
|
||||
self._flag_pairs = all_unquoted_flag_pairs
|
||||
|
||||
def __bool__(self) -> bool:
|
||||
return bool(self._flag_pairs)
|
||||
|
||||
@classmethod
|
||||
def extract(cls, sargs: str) -> "_UnquotedFlags":
|
||||
all_unquoted_flag_pairs: List[Tuple[Match[str], str]] = []
|
||||
prev_flags_arg = None
|
||||
for arg in shlex.split(sargs):
|
||||
if prev_flags_arg is not None:
|
||||
all_unquoted_flag_pairs.append((prev_flags_arg, arg))
|
||||
prev_flags_arg = cls.flags_arg_pattern.match(arg)
|
||||
return cls(all_unquoted_flag_pairs)
|
||||
|
||||
def report(self) -> str:
|
||||
single_errors = [
|
||||
"({0}) {1} {2} => {3}".format(
|
||||
i + 1,
|
||||
match.group(0),
|
||||
next_arg,
|
||||
'{0}="{1} {2}"'.format(match.group(1), match.group(2), next_arg),
|
||||
)
|
||||
for i, (match, next_arg) in enumerate(self._flag_pairs)
|
||||
]
|
||||
return dedent(
|
||||
"""\
|
||||
Some compiler or linker flags were provided without quoting their arguments,
|
||||
which now causes spack to try to parse the *next* argument as a spec component
|
||||
such as a variant instead of an additional compiler or linker flag. If the
|
||||
intent was to set multiple flags, try quoting them together as described below.
|
||||
|
||||
Possible flag quotation errors (with the correctly-quoted version after the =>):
|
||||
{0}"""
|
||||
).format("\n".join(single_errors))
|
||||
key, delim, value = match.groups()
|
||||
return f"{key}{delim}{spack.parser.quote_if_needed(value)}"
|
||||
|
||||
|
||||
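Sketched behavior of the new helper (exact output depends on
``spack.parser.quote_if_needed``; these values are illustrative):

.. code-block:: python

   quote_kvp("cflags=-O2 -g")  # -> 'cflags="-O2 -g"' (value contains a space)
   quote_kvp("foo==bar")       # -> "foo==bar" (no quoting needed)
   quote_kvp("hdf5@1.14")      # -> "hdf5@1.14" (not a key-value pair: unchanged)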
def parse_specs(args, **kwargs):
|
||||
def parse_specs(
|
||||
args: Union[str, List[str]], concretize: bool = False, tests: bool = False
|
||||
) -> List[spack.spec.Spec]:
|
||||
"""Convenience function for parsing arguments from specs. Handles common
|
||||
exceptions and dies if there are errors.
|
||||
"""
|
||||
concretize = kwargs.get("concretize", False)
|
||||
normalize = kwargs.get("normalize", False)
|
||||
tests = kwargs.get("tests", False)
|
||||
args = [args] if isinstance(args, str) else args
|
||||
arg_string = " ".join([quote_kvp(arg) for arg in args])
|
||||
|
||||
sargs = args
|
||||
if not isinstance(args, str):
|
||||
sargs = " ".join(args)
|
||||
unquoted_flags = _UnquotedFlags.extract(sargs)
|
||||
|
||||
try:
|
||||
specs = spack.parser.parse(sargs)
|
||||
for spec in specs:
|
||||
if concretize:
|
||||
spec.concretize(tests=tests) # implies normalize
|
||||
elif normalize:
|
||||
spec.normalize(tests=tests)
|
||||
return specs
|
||||
|
||||
except spack.error.SpecError as e:
|
||||
msg = e.message
|
||||
if e.long_message:
|
||||
msg += e.long_message
|
||||
# Unquoted flags will be read as a variant or hash
|
||||
if unquoted_flags and ("variant" in msg or "hash" in msg):
|
||||
msg += "\n\n"
|
||||
msg += unquoted_flags.report()
|
||||
|
||||
raise spack.error.SpackError(msg) from e
|
||||
specs = spack.parser.parse(arg_string)
|
||||
for spec in specs:
|
||||
if concretize:
|
||||
spec.concretize(tests=tests)
|
||||
return specs
|
||||
|
||||
|
||||
def matching_spec_from_env(spec):

@@ -6,7 +6,7 @@
import llnl.util.tty as tty

import spack.cmd
import spack.cmd.common.arguments as arguments
from spack.cmd.common import arguments

description = "add a spec to an environment"
section = "environments"

@@ -15,13 +15,13 @@
import spack.bootstrap
import spack.bootstrap.config
import spack.bootstrap.core
import spack.cmd.common.arguments
import spack.config
import spack.main
import spack.mirror
import spack.spec
import spack.stage
import spack.util.path
from spack.cmd.common import arguments

description = "manage bootstrap configuration"
section = "system"
@@ -68,12 +68,8 @@


def _add_scope_option(parser):
    scopes = spack.config.scopes()
    parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=spack.config.SCOPES_METAVAR,
        help="configuration scope to read/modify",
        "--scope", action=arguments.ConfigScope, help="configuration scope to read/modify"
    )


@@ -106,7 +102,7 @@ def setup_parser(subparser):
    disable.add_argument("name", help="name of the source to be disabled", nargs="?", default=None)

    reset = sp.add_parser("reset", help="reset bootstrapping configuration to Spack defaults")
    spack.cmd.common.arguments.add_common_arguments(reset, ["yes_to_all"])
    arguments.add_common_arguments(reset, ["yes_to_all"])

    root = sp.add_parser("root", help="get/set the root bootstrap directory")
    _add_scope_option(root)

@@ -21,7 +21,6 @@

import spack.binary_distribution as bindist
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.error
@@ -38,8 +37,10 @@
import spack.util.crypto
import spack.util.url as url_util
import spack.util.web as web_util
from spack import traverse
from spack.build_environment import determine_number_of_jobs
from spack.cmd import display_specs
from spack.cmd.common import arguments
from spack.oci.image import (
    Digest,
    ImageReference,
@@ -76,7 +77,19 @@ def setup_parser(subparser: argparse.ArgumentParser):
    )
    push_sign = push.add_mutually_exclusive_group(required=False)
    push_sign.add_argument(
        "--unsigned", "-u", action="store_true", help="push unsigned buildcache tarballs"
        "--unsigned",
        "-u",
        action="store_false",
        dest="signed",
        default=None,
        help="push unsigned buildcache tarballs",
    )
    push_sign.add_argument(
        "--signed",
        action="store_true",
        dest="signed",
        default=None,
        help="push signed buildcache tarballs",
    )
    push_sign.add_argument(
        "--key", "-k", metavar="key", type=str, default=None, help="key for signing"
@@ -110,7 +123,14 @@ def setup_parser(subparser: argparse.ArgumentParser):
        help="stop pushing on first failure (default is best effort)",
    )
    push.add_argument(
        "--base-image", default=None, help="specify the base image for the buildcache. "
        "--base-image", default=None, help="specify the base image for the buildcache"
    )
    push.add_argument(
        "--tag",
        "-t",
        default=None,
        help="when pushing to an OCI registry, tag an image containing all root specs and their "
        "runtime dependencies",
    )
    arguments.add_common_arguments(push, ["specs", "jobs"])
    push.set_defaults(func=push_fn)
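The `--signed`/`--unsigned` pair above writes into a single `signed` destination with `default=None`, so the code can tell "flag not given" apart from an explicit choice and let the mirror's own configuration win when neither flag is passed. A self-contained sketch of the same argparse pattern (flag names here are only illustrative):

```python
import argparse

parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=False)
# Both flags share one destination; None means "not specified on the CLI".
group.add_argument("--unsigned", "-u", action="store_false", dest="signed", default=None)
group.add_argument("--signed", action="store_true", dest="signed", default=None)

assert parser.parse_args(["--unsigned"]).signed is False  # explicit opt-out
assert parser.parse_args(["--signed"]).signed is True     # explicit opt-in
assert parser.parse_args([]).signed is None               # defer to the mirror's default
```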
@@ -182,23 +202,22 @@ def setup_parser(subparser: argparse.ArgumentParser):
    )

    # used to construct scope arguments below
    scopes = spack.config.scopes()

    check.add_argument(
        "--scope",
        choices=scopes,
        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope(),
        action=arguments.ConfigScope,
        default=lambda: spack.config.default_modify_scope(),
        help="configuration scope containing mirrors to check",
    )
    check_spec_or_specfile = check.add_mutually_exclusive_group(required=True)
    check_spec_or_specfile.add_argument(
    # Unfortunately there are 3 ways to do the same thing here:
    check_specs = check.add_mutually_exclusive_group()
    check_specs.add_argument(
        "-s", "--spec", help="check single spec instead of release specs file"
    )
    check_spec_or_specfile.add_argument(
    check_specs.add_argument(
        "--spec-file",
        help="check single spec from json or yaml file instead of release specs file",
    )
    arguments.add_common_arguments(check, ["specs"])

    check.set_defaults(func=check_fn)
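Note the new `default=lambda: spack.config.default_modify_scope()` above: the callable defers the scope lookup until the argument's default is actually read, instead of computing it when the parser module is imported. This only works because the custom action resolves callable defaults, as in this stdlib-only sketch (the action and helper are stand-ins for the real `ConfigScope`, shown further below):

```python
import argparse

def expensive_default():
    # stands in for spack.config.default_modify_scope(), which should not
    # run at module import time
    return "user"

class LazyDefault(argparse.Action):
    @property
    def default(self):
        return self._default() if callable(self._default) else self._default

    @default.setter
    def default(self, value):
        self._default = value

    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, values)

parser = argparse.ArgumentParser()
parser.add_argument("--scope", action=LazyDefault, default=expensive_default)
print(parser.parse_args([]).scope)  # "user", computed at parse time
```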

@@ -320,26 +339,36 @@ def push_fn(args):
    )

    if args.specs or args.spec_file:
        specs = _matching_specs(spack.cmd.parse_specs(args.specs or args.spec_file))
        roots = _matching_specs(spack.cmd.parse_specs(args.specs or args.spec_file))
    else:
        specs = spack.cmd.require_active_env("buildcache push").all_specs()
        roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()

    if args.allow_root:
        tty.warn(
            "The flag `--allow-root` is the default in Spack 0.21, will be removed in Spack 0.22"
        )

    mirror: spack.mirror.Mirror = args.mirror

    # Check if this is an OCI image.
    try:
        image_ref = spack.oci.oci.image_from_mirror(args.mirror)
        target_image = spack.oci.oci.image_from_mirror(mirror)
    except ValueError:
        image_ref = None
        target_image = None

    push_url = mirror.push_url

    # When neither --signed, --unsigned nor --key are specified, use the mirror's default.
    if args.signed is None and not args.key:
        unsigned = not mirror.signed
    else:
        unsigned = not (args.key or args.signed)

    # For OCI images, we require dependencies to be pushed for now.
    if image_ref:
    if target_image:
        if "dependencies" not in args.things_to_install:
            tty.die("Dependencies must be pushed for OCI images.")
        if not args.unsigned:
        if not unsigned:
            tty.warn(
                "Code signing is currently not supported for OCI images. "
                "Use --unsigned to silence this warning."
@@ -347,26 +376,48 @@ def push_fn(args):

    # This is a list of installed, non-external specs.
    specs = bindist.specs_to_be_packaged(
        specs,
        roots,
        root="package" in args.things_to_install,
        dependencies="dependencies" in args.things_to_install,
    )

    url = args.mirror.push_url

    # When pushing multiple specs, print the url once ahead of time, as well as how
    # many specs are being pushed.
    if len(specs) > 1:
        tty.info(f"Selected {len(specs)} specs to push to {url}")
        tty.info(f"Selected {len(specs)} specs to push to {push_url}")

    failed = []

    # TODO: unify this logic in the future.
    if image_ref:
    if target_image:
        base_image = ImageReference.from_string(args.base_image) if args.base_image else None
        with tempfile.TemporaryDirectory(
            dir=spack.stage.get_stage_root()
        ) as tmpdir, _make_pool() as pool:
            skipped = _push_oci(args, image_ref, specs, tmpdir, pool)
            skipped, base_images, checksums = _push_oci(
                target_image=target_image,
                base_image=base_image,
                installed_specs_with_deps=specs,
                force=args.force,
                tmpdir=tmpdir,
                pool=pool,
            )

            # Apart from creating manifests for each individual spec, we allow users to create a
            # separate image tag for all root specs and their runtime dependencies.
            if args.tag:
                tagged_image = target_image.with_tag(args.tag)
                # _push_oci may not populate base_images if binaries were already in the registry
                for spec in roots:
                    _update_base_images(
                        base_image=base_image,
                        target_image=target_image,
                        spec=spec,
                        base_image_cache=base_images,
                    )
                _put_manifest(base_images, checksums, tagged_image, tmpdir, None, None, *roots)
                tty.info(f"Tagged {tagged_image}")

    else:
        skipped = []

@@ -374,10 +425,10 @@ def push_fn(args):
            try:
                bindist.push_or_raise(
                    spec,
                    url,
                    push_url,
                    bindist.PushOptions(
                        force=args.force,
                        unsigned=args.unsigned,
                        unsigned=unsigned,
                        key=args.key,
                        regenerate_index=args.update_index,
                    ),
@@ -385,7 +436,7 @@ def push_fn(args):

                msg = f"{_progress(i, len(specs))}Pushed {_format_spec(spec)}"
                if len(specs) == 1:
                    msg += f" to {url}"
                    msg += f" to {push_url}"
                tty.info(msg)

            except bindist.NoOverwriteException:
@@ -427,11 +478,11 @@ def push_fn(args):
    # Update the index if requested
    # TODO: remove update index logic out of bindist; should be once after all specs are pushed
    # not once per spec.
    if image_ref and len(skipped) < len(specs) and args.update_index:
    if target_image and len(skipped) < len(specs) and args.update_index:
        with tempfile.TemporaryDirectory(
            dir=spack.stage.get_stage_root()
        ) as tmpdir, _make_pool() as pool:
            _update_index_oci(image_ref, tmpdir, pool)
            _update_index_oci(target_image, tmpdir, pool)


def _get_spack_binary_blob(image_ref: ImageReference) -> Optional[spack.oci.oci.Blob]:
@@ -497,17 +548,21 @@ def _archspec_to_gooarch(spec: spack.spec.Spec) -> str:
def _put_manifest(
    base_images: Dict[str, Tuple[dict, dict]],
    checksums: Dict[str, spack.oci.oci.Blob],
    spec: spack.spec.Spec,
    image_ref: ImageReference,
    tmpdir: str,
    extra_config: Optional[dict],
    annotations: Optional[dict],
    *specs: spack.spec.Spec,
):
    architecture = _archspec_to_gooarch(spec)
    architecture = _archspec_to_gooarch(specs[0])

    dependencies = list(
        reversed(
            list(
                s
                for s in spec.traverse(order="topo", deptype=("link", "run"), root=True)
                for s in traverse.traverse_nodes(
                    specs, order="topo", deptype=("link", "run"), root=True
                )
                if not s.external
            )
        )
    )
@@ -516,7 +571,7 @@ def _put_manifest(
    base_manifest, base_config = base_images[architecture]
    env = _retrieve_env_dict_from_config(base_config)

    spack.user_environment.environment_modifications_for_specs(spec).apply_modifications(env)
    spack.user_environment.environment_modifications_for_specs(*specs).apply_modifications(env)

    # Create an oci.image.config file
    config = copy.deepcopy(base_config)
@@ -528,20 +583,14 @@ def _put_manifest(
    # Set the environment variables
    config["config"]["Env"] = [f"{k}={v}" for k, v in env.items()]

    # From the OCI v1.0 spec:
    # > Any extra fields in the Image JSON struct are considered implementation
    # > specific and MUST be ignored by any implementations which are unable to
    # > interpret them.
    # We use this to store the Spack spec, so we can use it to create an index.
    spec_dict = spec.to_dict(hash=ht.dag_hash)
    spec_dict["buildcache_layout_version"] = 1
    spec_dict["binary_cache_checksum"] = {
        "hash_algorithm": "sha256",
        "hash": checksums[spec.dag_hash()].compressed_digest.digest,
    }
    config.update(spec_dict)
    if extra_config:
        # From the OCI v1.0 spec:
        # > Any extra fields in the Image JSON struct are considered implementation
        # > specific and MUST be ignored by any implementations which are unable to
        # > interpret them.
        config.update(extra_config)

    config_file = os.path.join(tmpdir, f"{spec.dag_hash()}.config.json")
    config_file = os.path.join(tmpdir, f"{specs[0].dag_hash()}.config.json")

    with open(config_file, "w") as f:
        json.dump(config, f, separators=(",", ":"))
@@ -572,48 +621,69 @@ def _put_manifest(
                for s in dependencies
            ),
        ],
        "annotations": {"org.opencontainers.image.description": spec.format()},
    }

    image_ref_for_spec = image_ref.with_tag(default_tag(spec))
    if annotations:
        oci_manifest["annotations"] = annotations

    # Finally upload the manifest
    upload_manifest_with_retry(image_ref_for_spec, oci_manifest=oci_manifest)
    upload_manifest_with_retry(image_ref, oci_manifest=oci_manifest)

    # delete the config file
    os.unlink(config_file)

    return image_ref_for_spec

def _update_base_images(
    *,
    base_image: Optional[ImageReference],
    target_image: ImageReference,
    spec: spack.spec.Spec,
    base_image_cache: Dict[str, Tuple[dict, dict]],
):
    """For a given spec and base image, copy the missing layers of the base image with matching
    arch to the registry of the target image. If no base image is specified, create a dummy
    manifest and config file."""
    architecture = _archspec_to_gooarch(spec)
    if architecture in base_image_cache:
        return
    if base_image is None:
        base_image_cache[architecture] = (
            default_manifest(),
            default_config(architecture, "linux"),
        )
    else:
        base_image_cache[architecture] = copy_missing_layers_with_retry(
            base_image, target_image, architecture
        )
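`_update_base_images` memoizes per architecture: the first spec seen for an arch pays for copying (or synthesizing) the base image, and every later spec with the same arch reuses the cached `(manifest, config)` pair. A generic illustration of that cache-or-compute shape (names and payloads here are invented):

```python
from typing import Callable, Dict, Tuple

def memoize_by_key(cache: Dict[str, Tuple[dict, dict]], key: str,
                   compute: Callable[[], Tuple[dict, dict]]) -> Tuple[dict, dict]:
    # Populate the cache at most once per key; later calls are free.
    if key not in cache:
        cache[key] = compute()
    return cache[key]

cache: Dict[str, Tuple[dict, dict]] = {}
for arch in ["amd64", "arm64", "amd64"]:
    manifest, config = memoize_by_key(
        cache, arch, lambda: ({"layers": []}, {"os": "linux"})
    )
print(len(cache))  # 2: the repeated "amd64" hit the cache
```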


def _push_oci(
    args,
    image_ref: ImageReference,
    *,
    target_image: ImageReference,
    base_image: Optional[ImageReference],
    installed_specs_with_deps: List[Spec],
    tmpdir: str,
    pool: multiprocessing.pool.Pool,
) -> List[str]:
    force: bool = False,
) -> Tuple[List[str], Dict[str, Tuple[dict, dict]], Dict[str, spack.oci.oci.Blob]]:
    """Push specs to an OCI registry

    Args:
        args: The command line arguments.
        image_ref: The image reference.
        image_ref: The target OCI image
        base_image: Optional base image, which will be copied to the target registry.
        installed_specs_with_deps: The installed specs to push, excluding externals,
            including deps, ordered from roots to leaves.
        force: Whether to overwrite existing layers and manifests in the buildcache.

    Returns:
        List[str]: The list of skipped specs (already in the buildcache).
        A tuple consisting of the list of skipped specs already in the build cache,
        a dictionary mapping architectures to base image manifests and configs,
        and a dictionary mapping each spec's dag hash to a blob.
    """

    # Reverse the order
    installed_specs_with_deps = list(reversed(installed_specs_with_deps))

    # The base image to use for the package. When not set, we use
    # the OCI registry only for storage, and do not use any base image.
    base_image_ref: Optional[ImageReference] = (
        ImageReference.from_string(args.base_image) if args.base_image else None
    )

    # Spec dag hash -> blob
    checksums: Dict[str, spack.oci.oci.Blob] = {}

@@ -623,11 +693,11 @@ def _push_oci(
    # Specs not uploaded because they already exist
    skipped = []

    if not args.force:
    if not force:
        tty.info("Checking for existing specs in the buildcache")
        to_be_uploaded = []

        tags_to_check = (image_ref.with_tag(default_tag(s)) for s in installed_specs_with_deps)
        tags_to_check = (target_image.with_tag(default_tag(s)) for s in installed_specs_with_deps)
        available_blobs = pool.map(_get_spack_binary_blob, tags_to_check)

        for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs):
@@ -640,46 +710,63 @@ def _push_oci(
        to_be_uploaded = installed_specs_with_deps

    if not to_be_uploaded:
        return skipped
        return skipped, base_images, checksums

    tty.info(
        f"{len(to_be_uploaded)} specs need to be pushed to {image_ref.domain}/{image_ref.name}"
        f"{len(to_be_uploaded)} specs need to be pushed to "
        f"{target_image.domain}/{target_image.name}"
    )

    # Upload blobs
    new_blobs = pool.starmap(
        _push_single_spack_binary_blob, ((image_ref, spec, tmpdir) for spec in to_be_uploaded)
        _push_single_spack_binary_blob, ((target_image, spec, tmpdir) for spec in to_be_uploaded)
    )

    # And update the spec to blob mapping
    for spec, blob in zip(to_be_uploaded, new_blobs):
        checksums[spec.dag_hash()] = blob

    # Copy base image layers, probably fine to do sequentially.
    # Copy base images if necessary
    for spec in to_be_uploaded:
        architecture = _archspec_to_gooarch(spec)
        # Get base image details, if we don't have them yet
        if architecture in base_images:
            continue
        if base_image_ref is None:
            base_images[architecture] = (default_manifest(), default_config(architecture, "linux"))
        else:
            base_images[architecture] = copy_missing_layers_with_retry(
                base_image_ref, image_ref, architecture
            )
        _update_base_images(
            base_image=base_image,
            target_image=target_image,
            spec=spec,
            base_image_cache=base_images,
        )

    def extra_config(spec: Spec):
        spec_dict = spec.to_dict(hash=ht.dag_hash)
        spec_dict["buildcache_layout_version"] = 1
        spec_dict["binary_cache_checksum"] = {
            "hash_algorithm": "sha256",
            "hash": checksums[spec.dag_hash()].compressed_digest.digest,
        }
        return spec_dict

    # Upload manifests
    tty.info("Uploading manifests")
    pushed_image_ref = pool.starmap(
    pool.starmap(
        _put_manifest,
        ((base_images, checksums, spec, image_ref, tmpdir) for spec in to_be_uploaded),
        (
            (
                base_images,
                checksums,
                target_image.with_tag(default_tag(spec)),
                tmpdir,
                extra_config(spec),
                {"org.opencontainers.image.description": spec.format()},
                spec,
            )
            for spec in to_be_uploaded
        ),
    )

    # Print the image names of the top-level specs
    for spec, ref in zip(to_be_uploaded, pushed_image_ref):
        tty.info(f"Pushed {_format_spec(spec)} to {ref}")
    for spec in to_be_uploaded:
        tty.info(f"Pushed {_format_spec(spec)} to {target_image.with_tag(default_tag(spec))}")

    return skipped
    return skipped, base_images, checksums


def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:
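Manifest uploads above fan out through `pool.starmap`: each tuple in the iterable is unpacked into one `_put_manifest` call, run in a worker process. A self-contained sketch of the pattern (the worker and data are made up for illustration):

```python
import multiprocessing

def upload(name: str, tag: str) -> str:
    # stand-in for a per-spec manifest upload
    return f"{name}:{tag}"

if __name__ == "__main__":
    specs = ["zlib", "cmake", "hdf5"]
    with multiprocessing.Pool(2) as pool:
        # one argument tuple per spec, unpacked into upload(name, tag)
        refs = pool.starmap(upload, ((s, "latest") for s in specs))
    print(refs)  # ['zlib:latest', 'cmake:latest', 'hdf5:latest']
```

One design note: workers must be picklable top-level functions, which is presumably why the `extra_config(spec)` closure is evaluated in the parent and only its result is shipped to the pool, while the top-level `_put_manifest` does the cross-process work.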
@@ -816,15 +903,24 @@ def check_fn(args: argparse.Namespace):
    exit code is non-zero, then at least one of the indicated specs needs to be rebuilt
    """
    if args.spec_file:
        specs_arg = (
            args.spec_file if os.path.sep in args.spec_file else os.path.join(".", args.spec_file)
        )
        tty.warn(
            "The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
            "Use --spec instead."
            f"Use `spack buildcache check {specs_arg}` instead."
        )
    elif args.spec:
        specs_arg = args.spec
        tty.warn(
            "The flag `--spec` is deprecated and will be removed in Spack 0.23. "
            f"Use `spack buildcache check {specs_arg}` instead."
        )
    else:
        specs_arg = args.specs

    specs = spack.cmd.parse_specs(args.spec or args.spec_file)

    if specs:
        specs = _matching_specs(specs)
    if specs_arg:
        specs = _matching_specs(spack.cmd.parse_specs(specs_arg))
    else:
        specs = spack.cmd.require_active_env("buildcache check").all_specs()


@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spack.cmd
import spack.cmd.common.arguments as arguments
from spack.cmd.common import arguments

description = "change an existing spec in an environment"
section = "environments"

@@ -16,6 +16,7 @@
import spack.cmd.buildcache as buildcache
import spack.config as cfg
import spack.environment as ev
import spack.environment.depfile
import spack.hash_types as ht
import spack.mirror
import spack.util.gpg as gpg_util
@@ -606,7 +607,9 @@ def ci_rebuild(args):
            "SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
            "-j$(nproc)",
            "install-deps/{}".format(
                ev.depfile.MakefileSpec(job_spec).safe_format("{name}-{version}-{hash}")
                spack.environment.depfile.MakefileSpec(job_spec).safe_format(
                    "{name}-{version}-{hash}"
                )
            ),
        ],
        spack_cmd + ["install"] + root_install_args,

@@ -12,13 +12,13 @@

import spack.bootstrap
import spack.caches
import spack.cmd.common.arguments as arguments
import spack.cmd.test
import spack.config
import spack.repo
import spack.stage
import spack.store
import spack.util.path
from spack.cmd.common import arguments
from spack.paths import lib_path, var_path

description = "remove temporary build files and/or downloaded archives"

@@ -67,12 +67,13 @@ class ConstraintAction(argparse.Action):

    def __call__(self, parser, namespace, values, option_string=None):
        # Query specs from command line
        self.values = values
        namespace.constraint = values
        self.constraint = namespace.constraint = values
        self.constraint_specs = namespace.constraint_specs = []
        namespace.specs = self._specs

    def _specs(self, **kwargs):
        qspecs = spack.cmd.parse_specs(self.values)
        # store parsed specs in spec.constraint after a call to specs()
        self.constraint_specs[:] = spack.cmd.parse_specs(self.constraint)

        # If an environment is provided, we'll restrict the search to
        # only its installed packages.
@@ -81,12 +82,12 @@ def _specs(self, **kwargs):
            kwargs["hashes"] = set(env.all_hashes())

        # return everything for an empty query.
        if not qspecs:
        if not self.constraint_specs:
            return spack.store.STORE.db.query(**kwargs)

        # Return only matching stuff otherwise.
        specs = {}
        for spec in qspecs:
        for spec in self.constraint_specs:
            for s in spack.store.STORE.db.query(spec, **kwargs):
                # This is fast for already-concrete specs
                specs[s.dag_hash()] = s
@@ -124,6 +125,33 @@ def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, deptype)


class ConfigScope(argparse.Action):
    """Pick the currently configured config scopes."""

    def __init__(self, *args, **kwargs) -> None:
        kwargs.setdefault("metavar", spack.config.SCOPES_METAVAR)
        super().__init__(*args, **kwargs)

    @property
    def default(self):
        return self._default() if callable(self._default) else self._default

    @default.setter
    def default(self, value):
        self._default = value

    @property
    def choices(self):
        return spack.config.scopes().keys()

    @choices.setter
    def choices(self, value):
        pass

    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, values)
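Besides the lazy `default`, `ConfigScope` exposes `choices` as a read-only property recomputed on every access, so scopes that appear after the parser is built (for example an activated environment's scope) still validate; the no-op setter simply absorbs argparse's own assignment of `choices` during `add_argument`. A stdlib-only sketch of the dynamic-choices half (the scope list is a made-up stand-in for `spack.config.scopes()`):

```python
import argparse

AVAILABLE = ["defaults", "system", "site", "user"]  # stand-in for spack.config.scopes()

class DynamicChoices(argparse.Action):
    @property
    def choices(self):
        return AVAILABLE  # recomputed on each access

    @choices.setter
    def choices(self, value):
        pass  # argparse assigns this in add_argument; ignore it

    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, values)

parser = argparse.ArgumentParser()
parser.add_argument("--scope", action=DynamicChoices)
AVAILABLE.append("env:demo")  # a scope that appears after parser construction
print(parser.parse_args(["--scope", "env:demo"]).scope)  # validated against the live list
```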


def _cdash_reporter(namespace):
    """Helper function to create a CDash reporter. This function gets an early reference to the
    argparse namespace under construction, so it can later use it to create the object.
@@ -357,10 +385,11 @@ def install_status():
        "--install-status",
        action="store_true",
        default=True,
        help="show install status of packages\n\npackages can be: "
        "installed [+], missing and needed by an installed package [-], "
        "installed in an upstream instance [^], "
        "or not installed (no annotation)",
        help=(
            "show install status of packages\n"
            "[+] installed [^] installed in an upstream\n"
            " - not installed [-] missing dep of installed package\n"
        ),
    )



@@ -21,10 +21,11 @@ def confirm_action(specs: List[spack.spec.Spec], participle: str, noun: str):
        participle: action expressed as a participle, e.g. "uninstalled"
        noun: action expressed as a noun, e.g. "uninstallation"
    """
    tty.msg(f"The following {len(specs)} packages will be {participle}:\n")
    spack.cmd.display_specs(specs, **display_args)
    print("")
    answer = tty.get_yes_or_no("Do you want to proceed?", default=False)
    print()
    answer = tty.get_yes_or_no(
        f"{len(specs)} packages will be {participle}. Do you want to proceed?", default=False
    )
    if not answer:
        tty.msg(f"Aborting {noun}")
        sys.exit(0)

@@ -8,13 +8,13 @@
import llnl.util.tty as tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.deptypes as dt
import spack.error
import spack.paths
import spack.spec
import spack.store
from spack import build_environment, traverse
from spack.cmd.common import arguments
from spack.context import Context
from spack.util.environment import dump_environment, pickle_environment


@@ -14,6 +14,7 @@
import spack.compilers
import spack.config
import spack.spec
from spack.cmd.common import arguments

description = "manage compilers"
section = "system"
@@ -23,8 +24,6 @@
def setup_parser(subparser):
    sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="compiler_command")

    scopes = spack.config.scopes()

    # Find
    find_parser = sp.add_parser(
        "find",
@@ -47,9 +46,8 @@ def setup_parser(subparser):
    find_parser.add_argument("add_paths", nargs=argparse.REMAINDER)
    find_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope("compilers"),
        action=arguments.ConfigScope,
        default=lambda: spack.config.default_modify_scope("compilers"),
        help="configuration scope to modify",
    )

@@ -60,32 +58,20 @@ def setup_parser(subparser):
    )
    remove_parser.add_argument("compiler_spec")
    remove_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=spack.config.SCOPES_METAVAR,
        default=None,
        help="configuration scope to modify",
        "--scope", action=arguments.ConfigScope, default=None, help="configuration scope to modify"
    )

    # List
    list_parser = sp.add_parser("list", help="list available compilers")
    list_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_list_scope(),
        help="configuration scope to read from",
        "--scope", action=arguments.ConfigScope, help="configuration scope to read from"
    )

    # Info
    info_parser = sp.add_parser("info", help="show compiler paths")
    info_parser.add_argument("compiler_spec")
    info_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_list_scope(),
        help="configuration scope to read from",
        "--scope", action=arguments.ConfigScope, help="configuration scope to read from"
    )


@@ -103,7 +89,7 @@ def compiler_find(args):
        paths, scope=None, mixed_toolchain=args.mixed_toolchain
    )
    if new_compilers:
        spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope)
        spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope, init_config=False)
        n = len(new_compilers)
        s = "s" if n > 1 else ""


@@ -3,7 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spack.config
from spack.cmd.common import arguments
from spack.cmd.compiler import compiler_list

description = "list available compilers"
@@ -12,13 +12,8 @@


def setup_parser(subparser):
    scopes = spack.config.scopes()

    subparser.add_argument(
        "--scope",
        choices=scopes,
        metavar=spack.config.SCOPES_METAVAR,
        help="configuration scope to read/modify",
        "--scope", action=arguments.ConfigScope, help="configuration scope to read/modify"
    )



@@ -5,12 +5,12 @@
import collections
import os
import shutil
import sys
from typing import List

import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack.cmd.common.arguments
import spack.config
import spack.environment as ev
import spack.repo
@@ -18,6 +18,7 @@
import spack.schema.packages
import spack.store
import spack.util.spack_yaml as syaml
from spack.cmd.common import arguments
from spack.util.editor import editor

description = "get and set configuration options"
@@ -26,14 +27,9 @@


def setup_parser(subparser):
    scopes = spack.config.scopes()

    # User can only choose one
    subparser.add_argument(
        "--scope",
        choices=scopes,
        metavar=spack.config.SCOPES_METAVAR,
        help="configuration scope to read/modify",
        "--scope", action=arguments.ConfigScope, help="configuration scope to read/modify"
    )

    sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="config_command")
@@ -53,6 +49,7 @@ def setup_parser(subparser):
    blame_parser.add_argument(
        "section",
        help="configuration section to print\n\noptions: %(choices)s",
        nargs="?",
        metavar="section",
        choices=spack.config.SECTION_SCHEMAS,
    )
@@ -101,13 +98,13 @@ def setup_parser(subparser):
    setup_parser.add_parser = add_parser

    update = sp.add_parser("update", help="update configuration files to the latest format")
    spack.cmd.common.arguments.add_common_arguments(update, ["yes_to_all"])
    arguments.add_common_arguments(update, ["yes_to_all"])
    update.add_argument("section", help="section to update")

    revert = sp.add_parser(
        "revert", help="revert configuration files to their state before update"
    )
    spack.cmd.common.arguments.add_common_arguments(revert, ["yes_to_all"])
    arguments.add_common_arguments(revert, ["yes_to_all"])
    revert.add_argument("section", help="section to update")


@@ -136,32 +133,50 @@ def _get_scope_and_section(args):
    return scope, section


def print_configuration(args, *, blame: bool) -> None:
    if args.scope and args.section is None:
        tty.die(f"the argument --scope={args.scope} requires specifying a section.")

    if args.section is not None:
        spack.config.CONFIG.print_section(args.section, blame=blame, scope=args.scope)
        return

    print_flattened_configuration(blame=blame)


def print_flattened_configuration(*, blame: bool) -> None:
    """Prints to stdout a flattened version of the configuration.

    Args:
        blame: if True, shows file provenance for each entry in the configuration.
    """
    env = ev.active_environment()
    if env is not None:
        pristine = env.manifest.pristine_yaml_content
        flattened = pristine.copy()
        flattened[spack.schema.env.TOP_LEVEL_KEY] = pristine[spack.schema.env.TOP_LEVEL_KEY].copy()
    else:
        flattened = syaml.syaml_dict()
        flattened[spack.schema.env.TOP_LEVEL_KEY] = syaml.syaml_dict()

    for config_section in spack.config.SECTION_SCHEMAS:
        current = spack.config.get(config_section)
        flattened[spack.schema.env.TOP_LEVEL_KEY][config_section] = current
    syaml.dump_config(flattened, stream=sys.stdout, default_flow_style=False, blame=blame)
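`print_flattened_configuration` builds one document rooted at the manifest's top-level key and then overwrites each section with its merged value. A stdlib-only sketch of the same flattening shape (section names and values here are invented; real Spack merges layered scopes rather than reading a flat dict):

```python
import json

TOP_LEVEL_KEY = "spack"
SECTIONS = {"config": {"build_jobs": 8}, "packages": {"all": {"compiler": ["gcc"]}}}

def get_merged(section):
    # stand-in for spack.config.get(section), which returns the merged value
    return SECTIONS[section]

flattened = {TOP_LEVEL_KEY: {}}
for section in SECTIONS:
    flattened[TOP_LEVEL_KEY][section] = get_merged(section)

print(json.dumps(flattened, indent=2))  # one document, every section under "spack:"
```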


def config_get(args):
    """Dump merged YAML configuration for a specific section.

    With no arguments and an active environment, print the contents of
    the environment's manifest file (spack.yaml).
    """
    scope, section = _get_scope_and_section(args)

    if section is not None:
        spack.config.CONFIG.print_section(section)

    elif scope and scope.startswith("env:"):
        config_file = spack.config.CONFIG.get_config_filename(scope, section)
        if os.path.exists(config_file):
            with open(config_file) as f:
                print(f.read())
        else:
            tty.die("environment has no %s file" % ev.manifest_name)

    else:
        tty.die("`spack config get` requires a section argument or an active environment.")
    print_configuration(args, blame=False)


def config_blame(args):
    """Print out line-by-line blame of merged YAML."""
    spack.config.CONFIG.print_section(args.section, blame=True)
    print_configuration(args, blame=True)


def config_edit(args):

@@ -10,10 +10,10 @@
import llnl.util.tty as tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.cmd.common.confirmation as confirmation
import spack.environment as ev
import spack.spec
from spack.cmd.common import arguments

description = "remove specs from the concretized lockfile of an environment"
section = "environments"

@@ -9,11 +9,11 @@
from llnl.util.tty.colify import colify

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.package_base
import spack.repo
import spack.store
from spack.cmd.common import arguments

description = "show dependencies of a package"
section = "basic"

@@ -9,10 +9,10 @@
from llnl.util.tty.colify import colify

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.repo
import spack.store
from spack.cmd.common import arguments

description = "show packages that depend on another"
section = "basic"

@@ -20,9 +20,9 @@
from llnl.util.symlink import symlink

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.store
from spack.cmd.common import arguments
from spack.database import InstallStatuses
from spack.error import SpackError


@@ -9,9 +9,9 @@
import llnl.util.tty as tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.repo
from spack.cmd.common import arguments

description = "developer build: build from code in current working directory"
section = "build"

@@ -8,10 +8,10 @@
import llnl.util.tty as tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.spec
import spack.util.path
import spack.version
from spack.cmd.common import arguments
from spack.error import SpackError

description = "add a spec to an environment's dev-build information"
@@ -45,10 +45,41 @@ def setup_parser(subparser):
    arguments.add_common_arguments(subparser, ["spec"])


def develop(parser, args):
    env = spack.cmd.require_active_env(cmd_name="develop")
def _update_config(spec, path):
    find_fn = lambda section: spec.name in section

    entry = {"spec": str(spec)}
    if path != spec.name:
        entry["path"] = path

    def change_fn(section):
        section[spec.name] = entry

    spack.config.change_or_add("develop", find_fn, change_fn)
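`_update_config` packages the edit as two callbacks: `find_fn` identifies a config section that already holds an entry for the spec, and `change_fn` performs the mutation there. A toy sketch of that callback shape — the layered-scope model below is invented for illustration and simplifies whatever `spack.config.change_or_add` actually does:

```python
# Toy stand-in for layered config scopes, highest priority first.
scopes = [{"develop": {}}, {"develop": {"zlib": {"spec": "zlib@1.3"}}}]

def change_or_add(section_name, find_fn, change_fn):
    # Mutate the first scope whose section satisfies find_fn, else the top scope.
    for scope in scopes:
        if find_fn(scope[section_name]):
            change_fn(scope[section_name])
            return
    change_fn(scopes[0][section_name])

spec_name, entry = "zlib", {"spec": "zlib@1.3.1", "path": "zlib"}
change_or_add(
    "develop",
    lambda section: spec_name in section,
    lambda section: section.__setitem__(spec_name, entry),
)
print(scopes[1]["develop"]["zlib"])  # updated in the scope that already had it
```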


def _retrieve_develop_source(spec, abspath):
    # "steal" the source code via staging API. We ask for a stage
    # to be created, then copy it afterwards somewhere else. It would be
    # better if we can create the `source_path` directly into its final
    # destination.
    pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
    # We construct a package class ourselves, rather than asking for
    # Spec.package, since Spec only allows this when it is concrete
    package = pkg_cls(spec)
    if isinstance(package.stage[0].fetcher, spack.fetch_strategy.GitFetchStrategy):
        package.stage[0].fetcher.get_full_repo = True
        # If we retrieved this version before and cached it, we may have
        # done so without cloning the full git repo; likewise, any
        # mirror might store an instance with truncated history.
        package.stage[0].disable_mirrors()

    package.stage.steal_source(abspath)


def develop(parser, args):
    if not args.spec:
        env = spack.cmd.require_active_env(cmd_name="develop")
        if args.clone is False:
            raise SpackError("No spec provided to spack develop command")

@@ -66,7 +97,7 @@ def develop(parser, args):
            # Both old syntax `spack develop pkg@x` and new syntax `spack develop pkg@=x`
            # are currently supported.
            spec = spack.spec.parse_with_version_concrete(entry["spec"])
            env.develop(spec=spec, path=path, clone=True)
            _retrieve_develop_source(spec, abspath)

        if not env.dev_specs:
            tty.warn("No develop specs to download")
@@ -81,12 +112,16 @@ def develop(parser, args):
    version = spec.versions.concrete_range_as_version
    if not version:
        raise SpackError("Packages to develop must have a concrete version")

    spec.versions = spack.version.VersionList([version])

    # default path is relative path to spec.name
    # If user does not specify --path, we choose to create a directory in the
    # active environment's directory, named after the spec
    path = args.path or spec.name
    abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
    if not os.path.isabs(path):
        env = spack.cmd.require_active_env(cmd_name="develop")
        abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
    else:
        abspath = path

    # clone default: only if the path doesn't exist
    clone = args.clone
@@ -96,15 +131,24 @@ def develop(parser, args):
    if not clone and not os.path.exists(abspath):
        raise SpackError("Provided path %s does not exist" % abspath)

    if clone and os.path.exists(abspath):
        if args.force:
            shutil.rmtree(abspath)
        else:
            msg = "Path %s already exists and cannot be cloned to." % abspath
            msg += " Use `spack develop -f` to overwrite."
            raise SpackError(msg)
    if clone:
        if os.path.exists(abspath):
            if args.force:
                shutil.rmtree(abspath)
            else:
                msg = "Path %s already exists and cannot be cloned to." % abspath
                msg += " Use `spack develop -f` to overwrite."
                raise SpackError(msg)

        _retrieve_develop_source(spec, abspath)

    # Note: we could put develop specs in any scope, but I assume
    # users would only ever want to do this for either (a) an active
    # env or (b) a specified config file (e.g. that is included by
    # an environment)
    # TODO: when https://github.com/spack/spack/pull/35307 is merged,
    # an active env is not required if a scope is specified
    env = spack.cmd.require_active_env(cmd_name="develop")
    tty.debug("Updating develop config for {0} transactionally".format(env.name))
    with env.write_transaction():
        changed = env.develop(spec, path, clone)
        if changed:
            env.write()
        _update_config(spec, path)

@@ -10,11 +10,11 @@
from llnl.util.tty.color import cprint, get_color_when

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.solver.asp as asp
import spack.util.environment
import spack.util.spack_json as sjson
from spack.cmd.common import arguments

description = "compare two specs"
section = "basic"
@@ -44,6 +44,9 @@ def setup_parser(subparser):
        action="append",
        help="select the attributes to show (defaults to all)",
    )
    subparser.add_argument(
        "--ignore", action="append", help="omit diffs related to these dependencies"
    )


def shift(asp_function):
@@ -54,7 +57,7 @@ def shift(asp_function):
    return asp.AspFunction(first, rest)


def compare_specs(a, b, to_string=False, color=None):
def compare_specs(a, b, to_string=False, color=None, ignore_packages=None):
    """
    Generate a comparison, including diffs (for each side) and an intersection.

@@ -73,6 +76,14 @@ def compare_specs(a, b, to_string=False, color=None):
    if color is None:
        color = get_color_when()

    a = a.copy()
    b = b.copy()

    if ignore_packages:
        for pkg_name in ignore_packages:
            a.trim(pkg_name)
            b.trim(pkg_name)

    # Prepare a solver setup to parse differences
    setup = asp.SpackSolverSetup()

@@ -209,7 +220,7 @@ def diff(parser, args):

    # Calculate the comparison (c)
    color = False if args.dump_json else get_color_when()
    c = compare_specs(specs[0], specs[1], to_string=True, color=color)
    c = compare_specs(specs[0], specs[1], to_string=True, color=color, ignore_packages=args.ignore)

    # Default to all attributes
    attributes = args.attribute or ["all"]
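The `--ignore` values flow into `compare_specs`, which copies both specs and prunes the named dependencies before solving, so differences caused only by an uninteresting subtree drop out of the report. A generic copy-then-prune sketch — the "graph" below is a plain dict, not a `spack.spec.Spec`:

```python
def trim(graph: dict, name: str) -> None:
    graph.pop(name, None)  # drop the ignored dependency

def compare(a: dict, b: dict, ignore=None):
    a, b = dict(a), dict(b)  # never mutate the caller's graphs
    for name in ignore or ():
        trim(a, name)
        trim(b, name)
    return {k: (a.get(k), b.get(k)) for k in a.keys() | b.keys() if a.get(k) != b.get(k)}

left = {"hdf5": "1.14", "mpich": "4.1"}
right = {"hdf5": "1.14", "mpich": "4.2"}
print(compare(left, right))                    # {'mpich': ('4.1', '4.2')}
print(compare(left, right, ignore=["mpich"]))  # {} once mpich is ignored
```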

@@ -20,7 +20,6 @@
import spack.cmd
import spack.cmd.common
import spack.cmd.common.arguments
import spack.cmd.common.arguments as arguments
import spack.cmd.install
import spack.cmd.modules
import spack.cmd.uninstall
@@ -31,6 +30,7 @@
import spack.schema.env
import spack.spec
import spack.tengine
from spack.cmd.common import arguments
from spack.util.environment import EnvironmentModifications

description = "manage virtual environments"

@@ -10,10 +10,10 @@
from llnl.util.tty.colify import colify

import spack.cmd as cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.repo
import spack.store
from spack.cmd.common import arguments

description = "list extensions for package"
section = "extensions"

@@ -14,12 +14,12 @@

import spack
import spack.cmd
import spack.cmd.common.arguments
import spack.config
import spack.cray_manifest as cray_manifest
import spack.detection
import spack.error
import spack.util.environment
from spack.cmd.common import arguments

description = "manage external packages in Spack configuration"
section = "config"
@@ -29,8 +29,6 @@
def setup_parser(subparser):
    sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="external_command")

    scopes = spack.config.scopes()

    find_parser = sp.add_parser("find", help="add external packages to packages.yaml")
    find_parser.add_argument(
        "--not-buildable",
@@ -48,15 +46,14 @@ def setup_parser(subparser):
    )
    find_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope("packages"),
        action=arguments.ConfigScope,
        default=lambda: spack.config.default_modify_scope("packages"),
        help="configuration scope to modify",
    )
    find_parser.add_argument(
        "--all", action="store_true", help="search for all packages that Spack knows about"
    )
    spack.cmd.common.arguments.add_common_arguments(find_parser, ["tags", "jobs"])
    arguments.add_common_arguments(find_parser, ["tags", "jobs"])
    find_parser.add_argument("packages", nargs=argparse.REMAINDER)
    find_parser.epilog = (
        'The search is by default on packages tagged with the "build-tools" or '

@@ -6,11 +6,11 @@
import llnl.util.tty as tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.repo
import spack.traverse
from spack.cmd.common import arguments

description = "fetch archives for packages"
section = "build"

@@ -12,9 +12,9 @@

import spack.bootstrap
import spack.cmd as cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.repo
from spack.cmd.common import arguments
from spack.database import InstallStatuses

description = "list and search installed packages"
@@ -261,10 +261,8 @@ def find(parser, args):

    # Exit early with an error code if no package matches the constraint
    if not results and args.constraint:
        msg = "No package matches the query: {0}"
        msg = msg.format(" ".join(args.constraint))
        tty.msg(msg)
        raise SystemExit(1)
        constraint_str = " ".join(str(s) for s in args.constraint_specs)
        tty.die(f"No package matches the query: {constraint_str}")

    # If tags have been specified on the command line, filter by tags
    if args.tags:

@@ -8,6 +8,7 @@
import spack.cmd.common.arguments
import spack.cmd.common.confirmation
import spack.cmd.uninstall
import spack.deptypes as dt
import spack.environment as ev
import spack.store

@@ -17,31 +18,91 @@


def setup_parser(subparser):
    subparser.add_argument(
        "-E",
        "--except-any-environment",
        action="store_true",
        help="remove everything unless needed by an environment",
    )
    subparser.add_argument(
        "-e",
        "--except-environment",
        metavar="ENV",
        action="append",
        default=[],
        help="remove everything unless needed by specified environment\n"
        "you can list multiple environments, or specify directory\n"
        "environments by path.",
    )
    subparser.add_argument(
        "-b",
        "--keep-build-dependencies",
        action="store_true",
        help="do not remove installed build-only dependencies of roots\n"
        "(default is to keep only link & run dependencies)",
    )
    spack.cmd.common.arguments.add_common_arguments(subparser, ["yes_to_all"])


def roots_from_environments(args, active_env):
    # if we're using -E or -e, make a list of environments whose roots we should consider.
    all_environments = []

    # -E will garbage collect anything not needed by any env, including the current one
    if args.except_any_environment:
        all_environments += list(ev.all_environments())
        if active_env:
            all_environments.append(active_env)

    # -e says "also preserve things needed by this particular env"
    for env_name_or_dir in args.except_environment:
        if ev.exists(env_name_or_dir):
            env = ev.read(env_name_or_dir)
        elif ev.is_env_dir(env_name_or_dir):
            env = ev.Environment(env_name_or_dir)
        else:
            tty.die(f"No such environment: '{env_name_or_dir}'")
        all_environments.append(env)

    # add root hashes from all considered environments to list of roots
    root_hashes = set()
    for env in all_environments:
        root_hashes |= set(env.concretized_order)

    return root_hashes


def gc(parser, args):
    specs = spack.store.STORE.db.unused_specs
    deptype = dt.LINK | dt.RUN
    if args.keep_build_dependencies:
        deptype |= dt.BUILD

    # Restrict garbage collection to the active environment
    # speculating over roots that are yet to be installed
    env = ev.active_environment()
    if env:
        msg = 'Restricting the garbage collection to the "{0}" environment'
        tty.msg(msg.format(env.name))
        env.concretize()
        roots = [s for s in env.roots()]
        all_hashes = set([s.dag_hash() for r in roots for s in r.traverse()])
        lr_hashes = set([s.dag_hash() for r in roots for s in r.traverse(deptype=("link", "run"))])
        maybe_to_be_removed = all_hashes - lr_hashes
        specs = [s for s in specs if s.dag_hash() in maybe_to_be_removed]
    active_env = ev.active_environment()

    if not specs:
        msg = "There are no unused specs. Spack's store is clean."
        tty.msg(msg)
        return
    # wrap the whole command with a read transaction to avoid multiple
    with spack.store.STORE.db.read_transaction():
        if args.except_environment or args.except_any_environment:
            # if either of these is specified, we ignore the active environment and garbage
            # collect anything NOT in specified environments.
            root_hashes = roots_from_environments(args, active_env)

    if not args.yes_to_all:
        spack.cmd.common.confirmation.confirm_action(specs, "uninstalled", "uninstallation")
        elif active_env:
            # only gc what's in current environment
            tty.msg(f"Restricting garbage collection to environment '{active_env.name}'")
            root_hashes = set(spack.store.STORE.db.all_hashes())  # keep everything
            root_hashes -= set(active_env.all_hashes())  # except this env
            root_hashes |= set(active_env.concretized_order)  # but keep its roots
        else:
            # consider all explicit specs roots (the default for db.unused_specs())
            root_hashes = None

    spack.cmd.uninstall.do_uninstall(specs, force=False)
        specs = spack.store.STORE.db.unused_specs(root_hashes=root_hashes, deptype=deptype)
        if not specs:
            tty.msg("There are no unused specs. Spack's store is clean.")
            return

        if not args.yes_to_all:
            spack.cmd.common.confirmation.confirm_action(specs, "uninstalled", "uninstall")

        spack.cmd.uninstall.do_uninstall(specs, force=False)
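The environment-restricted branch above builds the keep-set by plain set arithmetic on hashes: start from everything in the store, subtract the active environment's installed hashes, then add back its concretized roots, so only the environment's non-root leftovers are candidates for collection. A tiny sketch with plain sets (hash values invented, and the dependency-closure step of `unused_specs` is ignored for brevity):

```python
all_hashes = {"a", "b", "c", "d", "e"}  # everything in the store
env_hashes = {"c", "d", "e"}            # installed in the active environment
env_roots = {"c"}                       # the environment's concretized roots

roots_to_keep = (all_hashes - env_hashes) | env_roots
collectable = all_hashes - roots_to_keep
print(sorted(collectable))              # ['d', 'e']: env-only non-roots
```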
|
||||
|
||||
@@ -7,11 +7,11 @@
|
||||
import os
|
||||
|
||||
import spack.binary_distribution
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.mirror
|
||||
import spack.paths
|
||||
import spack.util.gpg
|
||||
import spack.util.url
|
||||
from spack.cmd.common import arguments
|
||||
|
||||
description = "handle GPG actions for spack"
|
||||
section = "packaging"
|
||||
|
||||
@@ -5,10 +5,10 @@
|
||||
from llnl.util import tty
|
||||
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.store
|
||||
from spack.cmd.common import arguments
|
||||
from spack.graph import DAGWithDependencyTypes, SimpleDAG, graph_ascii, graph_dot, static_graph_dot
|
||||
|
||||
description = "generate graphs of package dependency relationships"
|
||||
|
||||
@@ -11,13 +11,13 @@
|
||||
import llnl.util.tty.color as color
|
||||
from llnl.util.tty.colify import colify
|
||||
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.deptypes as dt
|
||||
import spack.fetch_strategy as fs
|
||||
import spack.install_test
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.version
|
||||
from spack.cmd.common import arguments
|
||||
from spack.package_base import preferred_version
|
||||
|
||||
description = "get detailed information on a particular package"
|
||||
@@ -327,7 +327,7 @@ def _variants_by_name_when(pkg):
|
||||
"""Adaptor to get variants keyed by { name: { when: { [Variant...] } }."""
|
||||
# TODO: replace with pkg.variants_by_name(when=True) when unified directive dicts are merged.
|
||||
variants = {}
|
||||
for name, (variant, whens) in pkg.variants.items():
|
||||
for name, (variant, whens) in sorted(pkg.variants.items()):
|
||||
for when in whens:
|
||||
variants.setdefault(name, {}).setdefault(when, []).append(variant)
|
||||
return variants
|
||||
|
||||
@@ -14,7 +14,6 @@
|
||||
|
||||
import spack.build_environment
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.fetch_strategy
|
||||
@@ -23,6 +22,7 @@
|
||||
import spack.report
|
||||
import spack.spec
|
||||
import spack.store
|
||||
from spack.cmd.common import arguments
|
||||
from spack.error import SpackError
|
||||
from spack.installer import PackageInstaller
|
||||
|
||||
@@ -162,8 +162,8 @@ def setup_parser(subparser):
|
||||
"--no-check-signature",
|
||||
action="store_true",
|
||||
dest="unsigned",
|
||||
default=False,
|
||||
help="do not check signatures of binary packages",
|
||||
default=None,
|
||||
help="do not check signatures of binary packages (override mirror config)",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--show-log-on-error",
|
||||
|
||||
@@ -15,9 +15,9 @@
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.tty.colify import colify
|
||||
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.deptypes as dt
|
||||
import spack.repo
|
||||
from spack.cmd.common import arguments
|
||||
from spack.version import VersionList
|
||||
|
||||
description = "list and search available packages"
|
||||
|
||||
@@ -8,12 +8,12 @@
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.cmd.find
|
||||
import spack.environment as ev
|
||||
import spack.store
|
||||
import spack.user_environment as uenv
|
||||
import spack.util.environment
|
||||
from spack.cmd.common import arguments
|
||||
|
||||
description = "add package to the user environment"
|
||||
section = "user environment"
|
||||
@@ -98,15 +98,15 @@ def load(parser, args):
|
||||
spack.cmd.display_specs(results)
|
||||
return
|
||||
|
||||
constraint_specs = spack.cmd.parse_specs(args.constraint)
|
||||
specs = [
|
||||
spack.cmd.disambiguate_spec(spec, env, first=args.load_first)
|
||||
for spec in spack.cmd.parse_specs(args.constraint)
|
||||
spack.cmd.disambiguate_spec(spec, env, first=args.load_first) for spec in constraint_specs
|
||||
]
|
||||
|
||||
if not args.shell:
|
||||
specs_str = " ".join(args.constraint) or "SPECS"
|
||||
specs_str = " ".join(str(s) for s in constraint_specs) or "SPECS"
|
||||
spack.cmd.common.shell_init_instructions(
|
||||
"spack load", " eval `spack load {sh_arg} %s`" % specs_str
|
||||
"spack load", f" eval `spack load {{sh_arg}} {specs_str}`"
|
||||
)
|
||||
return 1
|
||||
|
||||
|
||||
@@ -9,11 +9,11 @@
|
||||
|
||||
import spack.builder
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.environment as ev
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
import spack.stage
|
||||
from spack.cmd.common import arguments
|
||||
|
||||
description = "print out locations of packages and spack directories"
|
||||
section = "basic"
|
||||
|
||||
@@ -8,11 +8,11 @@
|
||||
from llnl.util import tty
|
||||
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.error
|
||||
import spack.package_base
|
||||
import spack.repo
|
||||
import spack.store
|
||||
from spack.cmd.common import arguments
|
||||
from spack.database import InstallStatuses
|
||||
|
||||
description = "mark packages as explicitly or implicitly installed"
|
||||
|
||||
@@ -11,7 +11,6 @@

 import spack.caches
 import spack.cmd
-import spack.cmd.common.arguments as arguments
 import spack.concretize
 import spack.config
 import spack.environment as ev
@@ -20,6 +19,7 @@
 import spack.spec
 import spack.util.path
 import spack.util.web as web_util
+from spack.cmd.common import arguments
 from spack.error import SpackError

 description = "manage mirrors (source and binary)"
@@ -88,18 +88,14 @@ def setup_parser(subparser):
         "--mirror-url", metavar="mirror_url", type=str, help="find mirror to destroy by url"
     )

-    # used to construct scope arguments below
-    scopes = spack.config.scopes()
-
     # Add
     add_parser = sp.add_parser("add", help=mirror_add.__doc__)
     add_parser.add_argument("name", help="mnemonic name for mirror", metavar="mirror")
     add_parser.add_argument("url", help="url of mirror directory from 'spack mirror create'")
     add_parser.add_argument(
         "--scope",
-        choices=scopes,
-        metavar=spack.config.SCOPES_METAVAR,
-        default=spack.config.default_modify_scope(),
+        action=arguments.ConfigScope,
+        default=lambda: spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
     add_parser.add_argument(
@@ -111,15 +107,31 @@ def setup_parser(subparser):
             "and source use `--type binary --type source` (default)"
         ),
     )
+    add_parser_signed = add_parser.add_mutually_exclusive_group(required=False)
+    add_parser_signed.add_argument(
+        "--unsigned",
+        help="do not require signing and signature verification when pushing and installing from "
+        "this build cache",
+        action="store_false",
+        default=None,
+        dest="signed",
+    )
+    add_parser_signed.add_argument(
+        "--signed",
+        help="require signing and signature verification when pushing and installing from this "
+        "build cache",
+        action="store_true",
+        default=None,
+        dest="signed",
+    )
     arguments.add_connection_args(add_parser, False)
     # Remove
     remove_parser = sp.add_parser("remove", aliases=["rm"], help=mirror_remove.__doc__)
     remove_parser.add_argument("name", help="mnemonic name for mirror", metavar="mirror")
     remove_parser.add_argument(
         "--scope",
-        choices=scopes,
-        metavar=spack.config.SCOPES_METAVAR,
-        default=spack.config.default_modify_scope(),
+        action=arguments.ConfigScope,
+        default=lambda: spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
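The `--scope` rewrite above swaps eagerly evaluated `choices`/`default` for a custom argparse action plus a callable default, so `spack.config` need not be queried while the parser is being built. A minimal standalone sketch of that pattern follows; it is not Spack's actual `arguments.ConfigScope` implementation, and `known_scopes` is a made-up stand-in for `spack.config.scopes()`:

    import argparse

    def known_scopes():
        # stand-in for spack.config.scopes(); a fixed list for the sketch
        return ["defaults", "system", "site", "user"]

    class ConfigScopeAction(argparse.Action):
        def __call__(self, parser, namespace, value, option_string=None):
            if value not in known_scopes():
                raise argparse.ArgumentError(self, f"unknown scope: {value}")
            setattr(namespace, self.dest, value)

    parser = argparse.ArgumentParser()
    parser.add_argument("--scope", action=ConfigScopeAction, default=lambda: "user")
    args = parser.parse_args([])
    # argparse stores a callable default uncalled; the consumer resolves it lazily
    scope = args.scope() if callable(args.scope) else args.scope
    print(scope)  # -> user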
@@ -136,9 +148,8 @@ def setup_parser(subparser):
     )
     set_url_parser.add_argument(
         "--scope",
-        choices=scopes,
-        metavar=spack.config.SCOPES_METAVAR,
-        default=spack.config.default_modify_scope(),
+        action=arguments.ConfigScope,
+        default=lambda: spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
     arguments.add_connection_args(set_url_parser, False)
@@ -163,11 +174,27 @@ def setup_parser(subparser):
         ),
     )
     set_parser.add_argument("--url", help="url of mirror directory from 'spack mirror create'")
+    set_parser_unsigned = set_parser.add_mutually_exclusive_group(required=False)
+    set_parser_unsigned.add_argument(
+        "--unsigned",
+        help="do not require signing and signature verification when pushing and installing from "
+        "this build cache",
+        action="store_false",
+        default=None,
+        dest="signed",
+    )
+    set_parser_unsigned.add_argument(
+        "--signed",
+        help="require signing and signature verification when pushing and installing from this "
+        "build cache",
+        action="store_true",
+        default=None,
+        dest="signed",
+    )
     set_parser.add_argument(
         "--scope",
-        choices=scopes,
-        metavar=spack.config.SCOPES_METAVAR,
-        default=spack.config.default_modify_scope(),
+        action=arguments.ConfigScope,
+        default=lambda: spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
     arguments.add_connection_args(set_parser, False)
@@ -175,11 +202,7 @@ def setup_parser(subparser):
     # List
     list_parser = sp.add_parser("list", help=mirror_list.__doc__)
     list_parser.add_argument(
-        "--scope",
-        choices=scopes,
-        metavar=spack.config.SCOPES_METAVAR,
-        default=spack.config.default_list_scope(),
-        help="configuration scope to read from",
+        "--scope", action=arguments.ConfigScope, help="configuration scope to read from"
     )


@@ -194,6 +217,7 @@ def mirror_add(args):
         or args.type
         or args.oci_username
         or args.oci_password
+        or args.signed is not None
     ):
         connection = {"url": args.url}
         if args.s3_access_key_id and args.s3_access_key_secret:
@@ -209,6 +233,8 @@ def mirror_add(args):
         if args.type:
             connection["binary"] = "binary" in args.type
             connection["source"] = "source" in args.type
+        if args.signed is not None:
+            connection["signed"] = args.signed
         mirror = spack.mirror.Mirror(connection, name=args.name)
     else:
         mirror = spack.mirror.Mirror(args.url, name=args.name)
@@ -241,6 +267,8 @@ def _configure_mirror(args):
         changes["endpoint_url"] = args.s3_endpoint_url
     if args.oci_username and args.oci_password:
         changes["access_pair"] = [args.oci_username, args.oci_password]
+    if getattr(args, "signed", None) is not None:
+        changes["signed"] = args.signed

     # argparse cannot distinguish between --binary and --no-binary when same dest :(
     # notice that set-url does not have these args, so getattr
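Both signing flags above share `dest="signed"` with `default=None`, giving a tri-state: `None` means the user passed neither flag, which is why `mirror_add` and `_configure_mirror` only touch the `signed` key when `args.signed is not None`. A minimal sketch of the same pattern:

    import argparse

    parser = argparse.ArgumentParser()
    group = parser.add_mutually_exclusive_group(required=False)
    group.add_argument("--signed", dest="signed", action="store_true", default=None)
    group.add_argument("--unsigned", dest="signed", action="store_false", default=None)

    for argv in ([], ["--signed"], ["--unsigned"]):
        args = parser.parse_args(argv)
        if args.signed is None:
            print(argv, "-> leave the mirror's existing setting alone")
        else:
            print(argv, "-> set signed =", args.signed)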
@@ -14,11 +14,11 @@
 from llnl.util.tty import color

 import spack.cmd
-import spack.cmd.common.arguments as arguments
 import spack.config
 import spack.modules
 import spack.modules.common
 import spack.repo
+from spack.cmd.common import arguments

 description = "manipulate module files"
 section = "environment"
@@ -388,21 +388,15 @@ def modules_cmd(parser, args, module_type, callbacks=callbacks):
         callbacks[args.subparser_name](module_type, specs, args)

     except MultipleSpecsMatch:
-        msg = "the constraint '{query}' matches multiple packages:\n"
+        query = " ".join(str(s) for s in args.constraint_specs)
+        msg = f"the constraint '{query}' matches multiple packages:\n"
         for s in specs:
             spec_fmt = "{hash:7} {name}{@version}{%compiler}"
             spec_fmt += "{compiler_flags}{variants}{arch=architecture}"
             msg += "\t" + s.cformat(spec_fmt) + "\n"
-        tty.error(msg.format(query=args.constraint))
-        tty.die(
-            "In this context exactly **one** match is needed: "
-            "please specify your constraints better."
-        )
+        tty.die(msg, "In this context exactly *one* match is needed.")

     except NoSpecMatches:
-        msg = "the constraint '{query}' matches no package."
-        tty.error(msg.format(query=args.constraint))
-        tty.die(
-            "In this context exactly **one** match is needed: "
-            "please specify your constraints better."
-        )
+        query = " ".join(str(s) for s in args.constraint_specs)
+        msg = f"the constraint '{query}' matches no package."
+        tty.die(msg, "In this context exactly *one* match is needed.")
@@ -6,12 +6,12 @@
 import llnl.util.tty as tty

 import spack.cmd
-import spack.cmd.common.arguments as arguments
 import spack.config
 import spack.environment as ev
 import spack.package_base
 import spack.repo
 import spack.traverse
+from spack.cmd.common import arguments

 description = "patch expanded archive sources in preparation for install"
 section = "build"
@@ -12,11 +12,11 @@
 from llnl.util.tty.colify import colify

 import spack.cmd
-import spack.cmd.common.arguments as arguments
 import spack.paths
 import spack.repo
 import spack.util.executable as exe
 import spack.util.package_hash as ph
+from spack.cmd.common import arguments

 description = "query packages associated with particular git revisions"
 section = "developer"
@@ -6,7 +6,7 @@
 import llnl.util.tty as tty

 import spack.cmd
-import spack.cmd.common.arguments as arguments
+from spack.cmd.common import arguments

 description = "remove specs from an environment"
 section = "environments"
@@ -11,6 +11,7 @@
 import spack.config
 import spack.repo
 import spack.util.path
+from spack.cmd.common import arguments

 description = "manage package source repositories"
 section = "config"
@@ -19,7 +20,6 @@

 def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="repo_command")
-    scopes = spack.config.scopes()

     # Create
     create_parser = sp.add_parser("create", help=repo_create.__doc__)
@@ -42,11 +42,7 @@ def setup_parser(subparser):
     # List
     list_parser = sp.add_parser("list", help=repo_list.__doc__)
     list_parser.add_argument(
-        "--scope",
-        choices=scopes,
-        metavar=spack.config.SCOPES_METAVAR,
-        default=spack.config.default_list_scope(),
-        help="configuration scope to read from",
+        "--scope", action=arguments.ConfigScope, help="configuration scope to read from"
     )

     # Add
@@ -54,9 +50,8 @@ def setup_parser(subparser):
     add_parser.add_argument("path", help="path to a Spack package repository directory")
     add_parser.add_argument(
         "--scope",
-        choices=scopes,
-        metavar=spack.config.SCOPES_METAVAR,
-        default=spack.config.default_modify_scope(),
+        action=arguments.ConfigScope,
+        default=lambda: spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )

@@ -67,9 +62,8 @@ def setup_parser(subparser):
     )
     remove_parser.add_argument(
         "--scope",
-        choices=scopes,
-        metavar=spack.config.SCOPES_METAVAR,
-        default=spack.config.default_modify_scope(),
+        action=arguments.ConfigScope,
+        default=lambda: spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
@@ -6,8 +6,8 @@
 import llnl.util.tty as tty

 import spack.cmd
-import spack.cmd.common.arguments as arguments
 import spack.repo
+from spack.cmd.common import arguments

 description = "revert checked out package source code"
 section = "build"
@@ -12,12 +12,12 @@

 import spack
 import spack.cmd
-import spack.cmd.common.arguments as arguments
 import spack.config
 import spack.environment
 import spack.hash_types as ht
 import spack.package_base
 import spack.solver.asp as asp
+from spack.cmd.common import arguments

 description = "concretize a specs using an ASP solver"
 section = "developer"
@@ -10,11 +10,11 @@

 import spack
 import spack.cmd
-import spack.cmd.common.arguments as arguments
 import spack.environment as ev
 import spack.hash_types as ht
 import spack.spec
 import spack.store
+from spack.cmd.common import arguments

 description = "show what would be installed, given a spec"
 section = "build"
@@ -8,13 +8,13 @@
 import llnl.util.tty as tty

 import spack.cmd
-import spack.cmd.common.arguments as arguments
 import spack.config
 import spack.environment as ev
 import spack.package_base
 import spack.repo
 import spack.stage
 import spack.traverse
+from spack.cmd.common import arguments

 description = "expand downloaded archive in preparation for install"
 section = "build"
@@ -15,12 +15,12 @@
 from llnl.util.tty import colify

 import spack.cmd
-import spack.cmd.common.arguments as arguments
 import spack.environment as ev
 import spack.install_test
 import spack.package_base
 import spack.repo
 import spack.report
+from spack.cmd.common import arguments

 description = "run spack's tests for an install"
 section = "admin"
@@ -10,11 +10,11 @@
 from llnl.util.filesystem import working_dir

 import spack
-import spack.cmd.common.arguments as arguments
 import spack.config
 import spack.paths
 import spack.util.git
 import spack.util.gpg
+from spack.cmd.common import arguments
 from spack.util.spack_yaml import syaml_dict

 description = "set up spack for our tutorial (WARNING: modifies config!)"
@@ -6,7 +6,7 @@
 import llnl.util.tty as tty

 import spack.cmd
-import spack.cmd.common.arguments as arguments
+from spack.cmd.common import arguments

 description = "remove specs from an environment"
 section = "environments"
@@ -17,21 +17,51 @@ def setup_parser(subparser):
     subparser.add_argument(
         "-a", "--all", action="store_true", help="remove all specs from (clear) the environment"
     )

     arguments.add_common_arguments(subparser, ["specs"])


+def _update_config(specs_to_remove, remove_all=False):
+    def change_fn(dev_config):
+        modified = False
+        for spec in specs_to_remove:
+            if spec.name in dev_config:
+                tty.msg("Undevelop: removing {0}".format(spec.name))
+                del dev_config[spec.name]
+                modified = True
+        if remove_all and dev_config:
+            dev_config.clear()
+            modified = True
+        return modified
+
+    spack.config.update_all("develop", change_fn)
+
+
 def undevelop(parser, args):
-    env = spack.cmd.require_active_env(cmd_name="undevelop")
-
+    remove_specs = None
+    remove_all = False
     if args.all:
-        specs = env.dev_specs.keys()
+        remove_all = True
     else:
-        specs = spack.cmd.parse_specs(args.specs)
+        remove_specs = spack.cmd.parse_specs(args.specs)

+    # TODO: when https://github.com/spack/spack/pull/35307 is merged,
+    # an active env is not required if a scope is specified
+    env = spack.cmd.require_active_env(cmd_name="undevelop")
     with env.write_transaction():
-        changed = False
-        for spec in specs:
-            tty.msg("Removing %s from environment %s development specs" % (spec, env.name))
-            changed |= env.undevelop(spec)
-        if changed:
-            env.write()
+        _update_config(remove_specs, remove_all)
+
+    updated_all_dev_specs = set(spack.config.get("develop"))
+    remove_spec_names = set(x.name for x in remove_specs)
+
+    if remove_all:
+        not_fully_removed = updated_all_dev_specs
+    else:
+        not_fully_removed = updated_all_dev_specs & remove_spec_names
+
+    if not_fully_removed:
+        tty.msg(
+            "The following specs could not be removed as develop specs"
+            " - see `spack config blame develop` to locate files requiring"
+            f" manual edits: {', '.join(not_fully_removed)}"
+        )
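The new code path above funnels all edits through a change function that `spack.config.update_all` applies to the `develop` section of every writable scope: the callback mutates one scope's section in place and reports whether anything changed. A standalone sketch of that contract, with plain dicts standing in for config sections and made-up package names:

    def make_change_fn(names_to_remove):
        def change_fn(dev_config):
            modified = False
            for name in names_to_remove:
                if name in dev_config:
                    del dev_config[name]
                    modified = True
            return modified

        return change_fn

    scope_sections = {"user": {"zlib": "..."}, "site": {"cmake": "..."}}
    change_fn = make_change_fn(["zlib"])
    for scope, section in scope_sections.items():
        if change_fn(section):
            print(f"would write updated develop section to scope '{scope}'")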
@@ -10,13 +10,13 @@
 from llnl.util.tty.colify import colify

 import spack.cmd
-import spack.cmd.common.arguments as arguments
 import spack.cmd.common.confirmation as confirmation
 import spack.environment as ev
 import spack.package_base
 import spack.spec
 import spack.store
 import spack.traverse as traverse
+from spack.cmd.common import arguments
 from spack.database import InstallStatuses

 description = "remove installed packages"
@@ -277,7 +277,7 @@ def uninstall_specs(args, specs):
         return

     if not args.yes_to_all:
-        confirmation.confirm_action(uninstall_list, "uninstalled", "uninstallation")
+        confirmation.confirm_action(uninstall_list, "uninstalled", "uninstall")

     # Uninstall everything on the list
     do_uninstall(uninstall_list, args.force)
@@ -227,9 +227,7 @@ def unit_test(parser, args, unknown_args):
     # has been used, then test that extension.
     pytest_root = spack.paths.spack_root
     if args.extension:
-        target = args.extension
-        extensions = spack.extensions.get_extension_paths()
-        pytest_root = spack.extensions.path_for_extension(target, *extensions)
+        pytest_root = spack.extensions.load_extension(args.extension)

     # pytest.ini lives in the root of the spack repository.
     with llnl.util.filesystem.working_dir(pytest_root):
@@ -7,10 +7,10 @@
 import sys

 import spack.cmd
-import spack.cmd.common.arguments as arguments
 import spack.error
 import spack.user_environment as uenv
 import spack.util.environment
+from spack.cmd.common import arguments

 description = "remove package from the user environment"
 section = "user environment"
@@ -8,9 +8,9 @@
 import llnl.util.tty as tty
 from llnl.util.tty.colify import colify

-import spack.cmd.common.arguments as arguments
 import spack.repo
 import spack.spec
+from spack.cmd.common import arguments
 from spack.version import infinity_versions, ver

 description = "list available versions of a package"
@@ -334,40 +334,6 @@ def __init__(
         # used for version checks for API, e.g. C++11 flag
         self._real_version = None

-    def __eq__(self, other):
-        return (
-            self.cc == other.cc
-            and self.cxx == other.cxx
-            and self.fc == other.fc
-            and self.f77 == other.f77
-            and self.spec == other.spec
-            and self.operating_system == other.operating_system
-            and self.target == other.target
-            and self.flags == other.flags
-            and self.modules == other.modules
-            and self.environment == other.environment
-            and self.extra_rpaths == other.extra_rpaths
-            and self.enable_implicit_rpaths == other.enable_implicit_rpaths
-        )
-
-    def __hash__(self):
-        return hash(
-            (
-                self.cc,
-                self.cxx,
-                self.fc,
-                self.f77,
-                self.spec,
-                self.operating_system,
-                self.target,
-                str(self.flags),
-                str(self.modules),
-                str(self.environment),
-                str(self.extra_rpaths),
-                self.enable_implicit_rpaths,
-            )
-        )
-
     def verify_executables(self):
         """Raise an error if any of the compiler executables is not valid.
@@ -109,7 +109,7 @@ def _to_dict(compiler):
     return {"compiler": d}


-def get_compiler_config(scope=None, init_config=False):
+def get_compiler_config(scope=None, init_config=True):
     """Return the compiler configuration for the specified architecture."""

     config = spack.config.get("compilers", scope=scope) or []
@@ -118,8 +118,6 @@ def get_compiler_config(scope=None, init_config=False):

     merged_config = spack.config.get("compilers")
     if merged_config:
-        # Config is empty for this scope
-        # Do not init config because there is a non-empty scope
         return config

     _init_compiler_config(scope=scope)
@@ -127,105 +125,6 @@ def get_compiler_config(scope=None, init_config=False):
     return config


-def get_compiler_config_from_packages(scope=None):
-    """Return the compiler configuration from packages.yaml"""
-    config = spack.config.get("packages", scope=scope)
-    if not config:
-        return []
-
-    packages = []
-    compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
-    for name, entry in config.items():
-        if name not in compiler_package_names:
-            continue
-        externals_config = entry.get("externals", None)
-        if not externals_config:
-            continue
-        packages.extend(_compiler_config_from_package_config(externals_config))
-
-    return packages
-
-
-def _compiler_config_from_package_config(config):
-    compilers = []
-    for entry in config:
-        compiler = _compiler_config_from_external(entry)
-        if compiler:
-            compilers.append(compiler)
-
-    return compilers
-
-
-def _compiler_config_from_external(config):
-    spec = spack.spec.parse_with_version_concrete(config["spec"])
-    # use str(spec.versions) to allow `@x.y.z` instead of `@=x.y.z`
-    compiler_spec = spack.spec.CompilerSpec(
-        package_name_to_compiler_name.get(spec.name, spec.name), spec.version
-    )
-
-    extra_attributes = config.get("extra_attributes", {})
-    prefix = config.get("prefix", None)
-
-    compiler_class = class_for_compiler_name(compiler_spec.name)
-    paths = extra_attributes.get("compilers", {})
-
-    # compilers format has cc/fc/f77, externals format has "c/fortran"
-    if "c" in paths:
-        paths["cc"] = paths.pop("c")
-    if "fortran" in paths:
-        fc = paths.pop("fortran")
-        paths["fc"] = fc
-        if "f77" not in paths:
-            paths["f77"] = fc
-
-    compiler_langs = ["cc", "cxx", "fc", "f77"]
-    for lang in compiler_langs:
-        if paths.setdefault(lang, None):
-            continue
-
-        if not prefix:
-            continue
-
-        # Check for files that satisfy the naming scheme for this compiler
-        bindir = os.path.join(prefix, "bin")
-        for f, regex in itertools.product(os.listdir(bindir), compiler_class.search_regexps(lang)):
-            if regex.match(f):
-                paths[lang] = os.path.join(bindir, f)
-
-    if all(v is None for v in paths.values()):
-        return None
-
-    if not spec.architecture:
-        host_platform = spack.platforms.host()
-        operating_system = host_platform.operating_system("default_os")
-        target = host_platform.target("default_target").microarchitecture
-    else:
-        target = spec.target
-        if not target:
-            host_platform = spack.platforms.host()
-            target = host_platform.target("default_target").microarchitecture
-
-        operating_system = spec.os
-        if not operating_system:
-            host_platform = spack.platforms.host()
-            operating_system = host_platform.operating_system("default_os")
-
-    compiler_entry = {
-        "compiler": {
-            "spec": str(compiler_spec),
-            "paths": paths,
-            "flags": extra_attributes.get("flags", {}),
-            "operating_system": str(operating_system),
-            "target": str(target.family),
-            "modules": config.get("modules", []),
-            "environment": extra_attributes.get("environment", {}),
-            "extra_rpaths": extra_attributes.get("extra_rpaths", []),
-            "implicit_rpaths": extra_attributes.get("implicit_rpaths", None),
-        }
-    }
-    return compiler_entry
-
-
 def _init_compiler_config(*, scope):
     """Compiler search used when Spack has no compilers."""
     compilers = find_compilers()
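For reference, this is roughly the shape of one packages.yaml "externals" entry as `_compiler_config_from_external` above consumes it, written as the Python dict the function receives (the paths and version are illustrative, not taken from the diff):

    external = {
        "spec": "gcc@12.3.0",
        "prefix": "/usr",
        "modules": [],
        "extra_attributes": {
            "compilers": {"c": "/usr/bin/gcc", "cxx": "/usr/bin/g++", "fortran": "/usr/bin/gfortran"},
            "flags": {},
            "environment": {},
            "extra_rpaths": [],
        },
    }
    # The function renames "c" -> "cc" and "fortran" -> "fc" (reusing the
    # latter for "f77" when unset), fills missing tools by scanning
    # <prefix>/bin against the compiler class's search regexps, and returns
    # a {"compiler": {...}} entry like the one constructed above.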
@@ -243,20 +142,17 @@ def compiler_config_files():
         compiler_config = config.get("compilers", scope=name)
         if compiler_config:
             config_files.append(config.get_config_filename(name, "compilers"))
-        compiler_config_from_packages = get_compiler_config_from_packages(scope=name)
-        if compiler_config_from_packages:
-            config_files.append(config.get_config_filename(name, "packages"))
     return config_files


-def add_compilers_to_config(compilers, scope=None):
+def add_compilers_to_config(compilers, scope=None, init_config=True):
     """Add compilers to the config for the specified architecture.

     Arguments:
         compilers: a list of Compiler objects.
         scope: configuration scope to modify.
     """
-    compiler_config = get_compiler_config(scope, init_config=False)
+    compiler_config = get_compiler_config(scope, init_config)
     for compiler in compilers:
         if not compiler.cc:
             tty.debug(f"{compiler.spec} does not have a C compiler")
@@ -288,9 +184,6 @@ def remove_compiler_from_config(compiler_spec, scope=None):
     for current_scope in candidate_scopes:
         removal_happened |= _remove_compiler_from_scope(compiler_spec, scope=current_scope)

-    msg = "`spack compiler remove` will not remove compilers defined in packages.yaml"
-    msg += "\nTo remove these compilers, either edit the config or use `spack external remove`"
-    tty.debug(msg)
     return removal_happened


@@ -305,7 +198,7 @@ def _remove_compiler_from_scope(compiler_spec, scope):
         True if one or more compiler entries were actually removed, False otherwise
     """
     assert scope is not None, "a specific scope is needed when calling this function"
-    compiler_config = get_compiler_config(scope, init_config=False)
+    compiler_config = get_compiler_config(scope)
     filtered_compiler_config = [
         compiler_entry
         for compiler_entry in compiler_config
@@ -328,14 +221,7 @@ def all_compilers_config(scope=None, init_config=True):
     """Return a set of specs for all the compiler versions currently
     available to build with. These are instances of CompilerSpec.
     """
-    from_packages_yaml = get_compiler_config_from_packages(scope)
-    if from_packages_yaml:
-        init_config = False
-    from_compilers_yaml = get_compiler_config(scope, init_config)
-
-    result = from_compilers_yaml + from_packages_yaml
-    key = lambda c: _compiler_from_config_entry(c["compiler"])
-    return list(llnl.util.lang.dedupe(result, key=key))
+    return get_compiler_config(scope, init_config)


 def all_compiler_specs(scope=None, init_config=True):
@@ -502,7 +388,7 @@ def find_specs_by_arch(compiler_spec, arch_spec, scope=None, init_config=True):


 def all_compilers(scope=None, init_config=True):
-    config = all_compilers_config(scope, init_config=init_config)
+    config = get_compiler_config(scope, init_config=init_config)
     compilers = list()
     for items in config:
         items = items["compiler"]
@@ -517,7 +403,10 @@ def compilers_for_spec(
     """This gets all compilers that satisfy the supplied CompilerSpec.
     Returns an empty list if none are found.
     """
-    config = all_compilers_config(scope, init_config)
+    if use_cache:
+        config = all_compilers_config(scope, init_config)
+    else:
+        config = get_compiler_config(scope, init_config)

     matches = set(find(compiler_spec, scope, init_config))
     compilers = []
@@ -693,7 +582,9 @@ def get_compiler_duplicates(compiler_spec, arch_spec):

     scope_to_compilers = {}
     for scope in config.scopes:
-        compilers = compilers_for_spec(compiler_spec, arch_spec=arch_spec, scope=scope)
+        compilers = compilers_for_spec(
+            compiler_spec, arch_spec=arch_spec, scope=scope, use_cache=False
+        )
         if compilers:
             scope_to_compilers[scope] = compilers
@@ -20,16 +20,16 @@ def __init__(self, *args, **kwargs):
         self.version_argument = "-V"

     # Subclasses use possible names of C compiler
-    cc_names = ["craycc", "cc"]
+    cc_names = ["craycc"]

     # Subclasses use possible names of C++ compiler
-    cxx_names = ["crayCC", "CC"]
+    cxx_names = ["crayCC"]

     # Subclasses use possible names of Fortran 77 compiler
-    f77_names = ["crayftn", "ftn"]
+    f77_names = ["crayftn"]

     # Subclasses use possible names of Fortran 90 compiler
-    fc_names = ["crayftn", "ftn"]
+    fc_names = ["crayftn"]

 # MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes.
 suffixes = [r"-mp-\d\.\d"]
@@ -35,12 +35,9 @@
 import os
 import re
 import sys
-from contextlib import contextmanager
-from typing import Dict, List, Optional, Union
+from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Type, Union

-import llnl.util.lang
-import llnl.util.tty as tty
-from llnl.util.filesystem import mkdirp, rename
+from llnl.util import filesystem, lang, tty

 import spack.compilers
 import spack.paths
@@ -70,6 +67,7 @@
     "compilers": spack.schema.compilers.schema,
     "concretizer": spack.schema.concretizer.schema,
     "definitions": spack.schema.definitions.schema,
+    "develop": spack.schema.develop.schema,
     "mirrors": spack.schema.mirrors.schema,
     "repos": spack.schema.repos.schema,
     "packages": spack.schema.packages.schema,
@@ -113,28 +111,34 @@
 #: Base name for the (internal) overrides scope.
 _OVERRIDES_BASE_NAME = "overrides-"

+#: Type used for raw YAML configuration
+YamlConfigDict = Dict[str, Any]
+

 class ConfigScope:
     """This class represents a configuration scope.

     A scope is one directory containing named configuration files.
-    Each file is a config "section" (e.g., mirrors, compilers, etc).
+    Each file is a config "section" (e.g., mirrors, compilers, etc.).
     """

-    def __init__(self, name, path):
+    def __init__(self, name, path) -> None:
         self.name = name  # scope name.
         self.path = path  # path to directory containing configs.
         self.sections = syaml.syaml_dict()  # sections read from config files.

     @property
-    def is_platform_dependent(self):
+    def is_platform_dependent(self) -> bool:
         """Returns true if the scope name is platform specific"""
         return os.sep in self.name

-    def get_section_filename(self, section):
+    def get_section_filename(self, section: str) -> str:
         """Returns the filename associated with a given section"""
         _validate_section_name(section)
-        return os.path.join(self.path, "%s.yaml" % section)
+        return os.path.join(self.path, f"{section}.yaml")

-    def get_section(self, section):
+    def get_section(self, section: str) -> Optional[YamlConfigDict]:
         """Returns the data associated with a given section"""
         if section not in self.sections:
             path = self.get_section_filename(section)
             schema = SECTION_SCHEMAS[section]
@@ -142,39 +146,44 @@ def get_section(self, section):
             self.sections[section] = data
         return self.sections[section]

-    def _write_section(self, section):
+    def _write_section(self, section: str) -> None:
         filename = self.get_section_filename(section)
         data = self.get_section(section)
         if data is None:
             return

         # We copy data here to avoid adding defaults at write time
         validate_data = copy.deepcopy(data)
         validate(validate_data, SECTION_SCHEMAS[section])

         try:
-            mkdirp(self.path)
+            filesystem.mkdirp(self.path)
             with open(filename, "w") as f:
                 syaml.dump_config(data, stream=f, default_flow_style=False)
-        except (syaml.SpackYAMLError, IOError) as e:
+        except (syaml.SpackYAMLError, OSError) as e:
             raise ConfigFileError(f"cannot write to '{filename}'") from e

-    def clear(self):
+    def clear(self) -> None:
         """Empty cached config information."""
         self.sections = syaml.syaml_dict()

-    def __repr__(self):
-        return "<ConfigScope: %s: %s>" % (self.name, self.path)
+    def __repr__(self) -> str:
+        return f"<ConfigScope: {self.name}: {self.path}>"


 class SingleFileScope(ConfigScope):
     """This class represents a configuration scope in a single YAML file."""

-    def __init__(self, name, path, schema, yaml_path=None):
+    def __init__(
+        self, name: str, path: str, schema: YamlConfigDict, yaml_path: Optional[List[str]] = None
+    ) -> None:
         """Similar to ``ConfigScope`` but can be embedded in another schema.

         Arguments:
             schema (dict): jsonschema for the file to read
             yaml_path (list): path in the schema where config data can be
                 found.

                 If the schema accepts the following yaml data, the yaml_path
                 would be ['outer', 'inner']
@@ -186,18 +195,18 @@ def __init__(self, name, path, schema, yaml_path=None):
                     install_tree: $spack/opt/spack
         """
         super().__init__(name, path)
-        self._raw_data = None
+        self._raw_data: Optional[YamlConfigDict] = None
         self.schema = schema
         self.yaml_path = yaml_path or []

     @property
-    def is_platform_dependent(self):
+    def is_platform_dependent(self) -> bool:
         return False

-    def get_section_filename(self, section):
+    def get_section_filename(self, section) -> str:
         return self.path

-    def get_section(self, section):
+    def get_section(self, section: str) -> Optional[YamlConfigDict]:
         # read raw data from the file, which looks like:
         # {
         #     'config': {
@@ -246,8 +255,8 @@ def get_section(self, section):

         return self.sections.get(section, None)

-    def _write_section(self, section):
-        data_to_write = self._raw_data
+    def _write_section(self, section: str) -> None:
+        data_to_write: Optional[YamlConfigDict] = self._raw_data

         # If there is no existing data, this section SingleFileScope has never
         # been written to disk. We need to construct the portion of the data
@@ -277,18 +286,18 @@ def _write_section(self, section):
         validate(data_to_write, self.schema)
         try:
             parent = os.path.dirname(self.path)
-            mkdirp(parent)
+            filesystem.mkdirp(parent)

-            tmp = os.path.join(parent, ".%s.tmp" % os.path.basename(self.path))
+            tmp = os.path.join(parent, f".{os.path.basename(self.path)}.tmp")
             with open(tmp, "w") as f:
                 syaml.dump_config(data_to_write, stream=f, default_flow_style=False)
-            rename(tmp, self.path)
+            filesystem.rename(tmp, self.path)

-        except (syaml.SpackYAMLError, IOError) as e:
+        except (syaml.SpackYAMLError, OSError) as e:
             raise ConfigFileError(f"cannot write to config file {str(e)}") from e

-    def __repr__(self):
-        return "<SingleFileScope: %s: %s>" % (self.name, self.path)
+    def __repr__(self) -> str:
+        return f"<SingleFileScope: {self.name}: {self.path}>"


 class ImmutableConfigScope(ConfigScope):
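The write path in `SingleFileScope._write_section` above is the standard atomic-replace idiom: dump to a hidden sibling temp file, then rename over the target so readers never observe a half-written config. A minimal standalone version of the same pattern:

    import os

    def atomic_write(path: str, text: str) -> None:
        parent = os.path.dirname(path) or "."
        os.makedirs(parent, exist_ok=True)
        tmp = os.path.join(parent, f".{os.path.basename(path)}.tmp")
        with open(tmp, "w") as f:
            f.write(text)
        os.rename(tmp, path)  # atomic on POSIX within one filesystem

    atomic_write("demo.yaml", "config: {}\n")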
@@ -297,11 +306,11 @@ class ImmutableConfigScope(ConfigScope):
     This is used for ConfigScopes passed on the command line.
     """

-    def _write_section(self, section):
-        raise ConfigError("Cannot write to immutable scope %s" % self)
+    def _write_section(self, section) -> None:
+        raise ConfigError(f"Cannot write to immutable scope {self}")

-    def __repr__(self):
-        return "<ImmutableConfigScope: %s: %s>" % (self.name, self.path)
+    def __repr__(self) -> str:
+        return f"<ImmutableConfigScope: {self.name}: {self.path}>"


 class InternalConfigScope(ConfigScope):
@@ -312,56 +321,58 @@ class InternalConfigScope(ConfigScope):
     override settings from files.
     """

-    def __init__(self, name, data=None):
+    def __init__(self, name: str, data: Optional[YamlConfigDict] = None) -> None:
         super().__init__(name, None)
         self.sections = syaml.syaml_dict()

-        if data:
+        if data is not None:
             data = InternalConfigScope._process_dict_keyname_overrides(data)
             for section in data:
                 dsec = data[section]
                 validate({section: dsec}, SECTION_SCHEMAS[section])
                 self.sections[section] = _mark_internal(syaml.syaml_dict({section: dsec}), name)

-    def get_section_filename(self, section):
+    def get_section_filename(self, section: str) -> str:
         raise NotImplementedError("Cannot get filename for InternalConfigScope.")

-    def get_section(self, section):
+    def get_section(self, section: str) -> Optional[YamlConfigDict]:
         """Just reads from an internal dictionary."""
         if section not in self.sections:
             self.sections[section] = None
         return self.sections[section]

-    def _write_section(self, section):
+    def _write_section(self, section: str) -> None:
         """This only validates, as the data is already in memory."""
         data = self.get_section(section)
         if data is not None:
             validate(data, SECTION_SCHEMAS[section])
             self.sections[section] = _mark_internal(data, self.name)

-    def __repr__(self):
-        return "<InternalConfigScope: %s>" % self.name
+    def __repr__(self) -> str:
+        return f"<InternalConfigScope: {self.name}>"

-    def clear(self):
+    def clear(self) -> None:
         # no cache to clear here.
         pass

     @staticmethod
-    def _process_dict_keyname_overrides(data):
+    def _process_dict_keyname_overrides(data: YamlConfigDict) -> YamlConfigDict:
         """Turn a trailing `:' in a key name into an override attribute."""
-        result = {}
+        # Below we have a lot of type directives, since we hack on types and monkey-patch them
+        # by adding attributes that otherwise they won't have.
+        result: YamlConfigDict = {}
         for sk, sv in data.items():
             if sk.endswith(":"):
                 key = syaml.syaml_str(sk[:-1])
-                key.override = True
+                key.override = True  # type: ignore[attr-defined]
             elif sk.endswith("+"):
                 key = syaml.syaml_str(sk[:-1])
-                key.prepend = True
+                key.prepend = True  # type: ignore[attr-defined]
             elif sk.endswith("-"):
                 key = syaml.syaml_str(sk[:-1])
-                key.append = True
+                key.append = True  # type: ignore[attr-defined]
             else:
-                key = sk
+                key = sk  # type: ignore[assignment]

             if isinstance(sv, dict):
                 result[key] = InternalConfigScope._process_dict_keyname_overrides(sv)
@@ -394,7 +405,7 @@ class Configuration:
     # convert to typing.OrderedDict when we drop 3.6, or OrderedDict when we reach 3.9
     scopes: Dict[str, ConfigScope]

-    def __init__(self, *scopes: ConfigScope):
+    def __init__(self, *scopes: ConfigScope) -> None:
         """Initialize a configuration with an initial list of scopes.

         Args:
@@ -405,26 +416,26 @@ def __init__(self, *scopes: ConfigScope):
         self.scopes = collections.OrderedDict()
         for scope in scopes:
             self.push_scope(scope)
-        self.format_updates: Dict[str, List[str]] = collections.defaultdict(list)
+        self.format_updates: Dict[str, List[ConfigScope]] = collections.defaultdict(list)

     @_config_mutator
-    def push_scope(self, scope: ConfigScope):
+    def push_scope(self, scope: ConfigScope) -> None:
         """Add a higher precedence scope to the Configuration."""
-        tty.debug("[CONFIGURATION: PUSH SCOPE]: {}".format(str(scope)), level=2)
+        tty.debug(f"[CONFIGURATION: PUSH SCOPE]: {str(scope)}", level=2)
         self.scopes[scope.name] = scope

     @_config_mutator
     def pop_scope(self) -> ConfigScope:
         """Remove the highest precedence scope and return it."""
         name, scope = self.scopes.popitem(last=True)  # type: ignore[call-arg]
-        tty.debug("[CONFIGURATION: POP SCOPE]: {}".format(str(scope)), level=2)
+        tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
         return scope

     @_config_mutator
     def remove_scope(self, scope_name: str) -> Optional[ConfigScope]:
         """Remove scope by name; has no effect when ``scope_name`` does not exist"""
         scope = self.scopes.pop(scope_name, None)
-        tty.debug("[CONFIGURATION: POP SCOPE]: {}".format(str(scope)), level=2)
+        tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
         return scope

     @property
@@ -481,16 +492,16 @@ def _validate_scope(self, scope: Optional[str]) -> ConfigScope:

         else:
             raise ValueError(
-                "Invalid config scope: '%s'. Must be one of %s" % (scope, self.scopes.keys())
+                f"Invalid config scope: '{scope}'. Must be one of {self.scopes.keys()}"
             )

-    def get_config_filename(self, scope, section) -> str:
+    def get_config_filename(self, scope: str, section: str) -> str:
         """For some scope and section, get the name of the configuration file."""
         scope = self._validate_scope(scope)
         return scope.get_section_filename(section)

     @_config_mutator
-    def clear_caches(self):
+    def clear_caches(self) -> None:
         """Clears the caches for configuration files,

         This will cause files to be re-read upon the next request."""
@@ -500,7 +511,7 @@ def clear_caches(self):
     @_config_mutator
     def update_config(
         self, section: str, update_data: Dict, scope: Optional[str] = None, force: bool = False
-    ):
+    ) -> None:
         """Update the configuration file for a particular scope.

         Overwrites contents of a section in a scope with update_data,
@@ -514,10 +525,10 @@ def update_config(
         format will fail to update unless ``force`` is True.

         Args:
-            section (str): section of the configuration to be updated
-            update_data (dict): data to be used for the update
-            scope (str): scope to be updated
-            force (str): force the update
+            section: section of the configuration to be updated
+            update_data: data to be used for the update
+            scope: scope to be updated
+            force: force the update
         """
         if self.format_updates.get(section) and not force:
             msg = (
@@ -546,7 +557,7 @@ def update_config(

         scope._write_section(section)

-    def get_config(self, section, scope=None):
+    def get_config(self, section: str, scope: Optional[str] = None) -> YamlConfigDict:
         """Get configuration settings for a section.

         If ``scope`` is ``None`` or not provided, return the merged contents
@@ -573,12 +584,12 @@ def get_config(self, section, scope=None):
         """
         return self._get_config_memoized(section, scope)

-    @llnl.util.lang.memoized
-    def _get_config_memoized(self, section, scope):
+    @lang.memoized
+    def _get_config_memoized(self, section: str, scope: Optional[str]) -> YamlConfigDict:
         _validate_section_name(section)

         if scope is None:
-            scopes = self.scopes.values()
+            scopes = list(self.scopes.values())
         else:
             scopes = [self._validate_scope(scope)]
@@ -613,7 +624,7 @@ def _get_config_memoized(self, section, scope):
             ret = syaml.syaml_dict(ret)
         return ret

-    def get(self, path, default=None, scope=None):
+    def get(self, path: str, default: Optional[Any] = None, scope: Optional[str] = None) -> Any:
         """Get a config section or a single value from one.

         Accepts a path syntax that allows us to grab nested config map
@@ -644,7 +655,7 @@ def get(self, path, default=None, scope=None):
         return value

     @_config_mutator
-    def set(self, path, value, scope=None):
+    def set(self, path: str, value: Any, scope: Optional[str] = None) -> None:
         """Convenience function for setting single values in config files.

         Accepts the path syntax described in ``get()``.
@@ -686,21 +697,22 @@ def set(self, path, value, scope=None):

     def __iter__(self):
         """Iterate over scopes in this configuration."""
-        for scope in self.scopes.values():
-            yield scope
+        yield from self.scopes.values()

-    def print_section(self, section, blame=False):
+    def print_section(self, section: str, blame: bool = False, *, scope=None) -> None:
         """Print a configuration to stdout."""
         try:
             data = syaml.syaml_dict()
-            data[section] = self.get_config(section)
+            data[section] = self.get_config(section, scope=scope)
             syaml.dump_config(data, stream=sys.stdout, default_flow_style=False, blame=blame)
-        except (syaml.SpackYAMLError, IOError) as e:
+        except (syaml.SpackYAMLError, OSError) as e:
             raise ConfigError(f"cannot read '{section}' configuration") from e


-@contextmanager
-def override(path_or_scope, value=None):
+@contextlib.contextmanager
+def override(
+    path_or_scope: Union[ConfigScope, str], value: Optional[Any] = None
+) -> Generator[Union[lang.Singleton, Configuration], None, None]:
     """Simple way to override config settings within a context.

     Arguments:
@@ -718,10 +730,10 @@ def override(path_or_scope, value=None):
     else:
         base_name = _OVERRIDES_BASE_NAME
         # Ensure the new override gets a unique scope name
-        current_overrides = [s.name for s in CONFIG.matching_scopes(r"^{0}".format(base_name))]
+        current_overrides = [s.name for s in CONFIG.matching_scopes(rf"^{base_name}")]
         num_overrides = len(current_overrides)
         while True:
-            scope_name = "{0}{1}".format(base_name, num_overrides)
+            scope_name = f"{base_name}{num_overrides}"
             if scope_name in current_overrides:
                 num_overrides += 1
             else:
@@ -738,12 +750,13 @@ def override(path_or_scope, value=None):
         assert scope is overrides


-#: configuration scopes added on the command line
-#: set by ``spack.main.main()``.
+#: configuration scopes added on the command line set by ``spack.main.main()``
 COMMAND_LINE_SCOPES: List[str] = []


-def _add_platform_scope(cfg, scope_type, name, path):
+def _add_platform_scope(
+    cfg: Union[Configuration, lang.Singleton], scope_type: Type[ConfigScope], name: str, path: str
+) -> None:
     """Add a platform-specific subdirectory for the current platform."""
     platform = spack.platforms.host().name
     plat_name = os.path.join(name, platform)
@@ -751,7 +764,9 @@ def _add_platform_scope(cfg, scope_type, name, path):
     cfg.push_scope(scope_type(plat_name, plat_path))


-def _add_command_line_scopes(cfg, command_line_scopes):
+def _add_command_line_scopes(
+    cfg: Union[Configuration, lang.Singleton], command_line_scopes: List[str]
+) -> None:
     """Add additional scopes from the --config-scope argument.

     Command line scopes are named after their position in the arg list.
@@ -760,26 +775,22 @@ def _add_command_line_scopes(cfg, command_line_scopes):
         # We ensure that these scopes exist and are readable, as they are
         # provided on the command line by the user.
         if not os.path.isdir(path):
-            raise ConfigError("config scope is not a directory: '%s'" % path)
+            raise ConfigError(f"config scope is not a directory: '{path}'")
         elif not os.access(path, os.R_OK):
-            raise ConfigError("config scope is not readable: '%s'" % path)
+            raise ConfigError(f"config scope is not readable: '{path}'")

         # name based on order on the command line
-        name = "cmd_scope_%d" % i
+        name = f"cmd_scope_{i:d}"
         cfg.push_scope(ImmutableConfigScope(name, path))
         _add_platform_scope(cfg, ImmutableConfigScope, name, path)


-def create():
+def create() -> Configuration:
    """Singleton Configuration instance.

     This constructs one instance associated with this module and returns
     it. It is bundled inside a function so that configuration can be
     initialized lazily.
-
-    Return:
-        (Configuration): object for accessing spack configuration
-
     """
     cfg = Configuration()
@@ -828,16 +839,25 @@ def create():


 #: This is the singleton configuration instance for Spack.
-CONFIG: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)
+CONFIG: Union[Configuration, lang.Singleton] = lang.Singleton(create)


-def add_from_file(filename, scope=None):
+def add_from_file(filename: str, scope: Optional[str] = None) -> None:
     """Add updates to a config from a filename"""
     # Extract internal attributes, if we are dealing with an environment
     data = read_config_file(filename)
-    if data is None:
-        return
+
+    if spack.schema.env.TOP_LEVEL_KEY in data:
+        data = data[spack.schema.env.TOP_LEVEL_KEY]
+
+    msg = (
+        "unexpected 'None' value when retrieving configuration. "
+        "Please submit a bug-report at https://github.com/spack/spack/issues"
+    )
+    assert data is not None, msg

     # update all sections from config dict
     # We have to iterate on keys to keep overrides from the file
     for section in data.keys():
@@ -855,7 +875,7 @@ def add_from_file(filename, scope=None):
         CONFIG.set(section, new, scope)


-def add(fullpath, scope=None):
+def add(fullpath: str, scope: Optional[str] = None) -> None:
     """Add the given configuration to the specified config scope.
     Add accepts a path. If you want to add from a filename, use add_from_file"""
     components = process_config_path(fullpath)
@@ -903,12 +923,12 @@ def add(fullpath, scope=None):
     CONFIG.set(path, new, scope)


-def get(path, default=None, scope=None):
+def get(path: str, default: Optional[Any] = None, scope: Optional[str] = None) -> Any:
     """Module-level wrapper for ``Configuration.get()``."""
     return CONFIG.get(path, default, scope)


-def set(path, value, scope=None):
+def set(path: str, value: Any, scope: Optional[str] = None) -> None:
     """Convenience function for setting single values in config files.

     Accepts the path syntax described in ``get()``.
@@ -916,32 +936,113 @@ def set(path, value, scope=None):
     return CONFIG.set(path, value, scope)


-def add_default_platform_scope(platform):
+def add_default_platform_scope(platform: str) -> None:
     plat_name = os.path.join("defaults", platform)
     plat_path = os.path.join(CONFIGURATION_DEFAULTS_PATH[1], platform)
     CONFIG.push_scope(ConfigScope(plat_name, plat_path))


-def scopes():
+def scopes() -> Dict[str, ConfigScope]:
     """Convenience function to get list of configuration scopes."""
     return CONFIG.scopes


-def _validate_section_name(section):
+def writable_scopes() -> List[ConfigScope]:
+    """
+    Return list of writable scopes
+    """
+    return list(
+        reversed(
+            list(
+                x
+                for x in CONFIG.scopes.values()
+                if not isinstance(x, (InternalConfigScope, ImmutableConfigScope))
+            )
+        )
+    )
+
+
+def writable_scope_names() -> List[str]:
+    return list(x.name for x in writable_scopes())
+
+
+def matched_config(cfg_path: str) -> List[Tuple[str, Any]]:
+    return [(scope, get(cfg_path, scope=scope)) for scope in writable_scope_names()]
+
+
+def change_or_add(
+    section_name: str, find_fn: Callable[[str], bool], update_fn: Callable[[str], None]
+) -> None:
+    """Change or add a subsection of config, with additional logic to
+    select a reasonable scope where the change is applied.
+
+    Search through config scopes starting with the highest priority:
+    the first matching a criteria (determined by ``find_fn``) is updated;
+    if no such config exists, find the first config scope that defines
+    any config for the named section; if no scopes define any related
+    config, then update the highest-priority config scope.
+    """
+    configs_by_section = matched_config(section_name)
+
+    found = False
+    for scope, section in configs_by_section:
+        found = find_fn(section)
+        if found:
+            break
+
+    if found:
+        update_fn(section)
+        spack.config.set(section_name, section, scope=scope)
+        return
+
+    # If no scope meets the criteria specified by ``find_fn``,
+    # then look for a scope that has any content (for the specified
+    # section name)
+    for scope, section in configs_by_section:
+        if section:
+            update_fn(section)
+            found = True
+            break
+
+    if found:
+        spack.config.set(section_name, section, scope=scope)
+        return
+
+    # If no scopes define any config for the named section, then
+    # modify the highest-priority scope.
+    scope, section = configs_by_section[0]
+    update_fn(section)
+    spack.config.set(section_name, section, scope=scope)
+
+
+def update_all(section_name: str, change_fn: Callable[[str], bool]) -> None:
+    """Change a config section, which may have details duplicated
+    across multiple scopes.
+    """
+    configs_by_section = matched_config("develop")
+
+    for scope, section in configs_by_section:
+        modified = change_fn(section)
+        if modified:
+            spack.config.set(section_name, section, scope=scope)
+
+
+def _validate_section_name(section: str) -> None:
     """Exit if the section is not a valid section."""
     if section not in SECTION_SCHEMAS:
         raise ConfigSectionError(
-            "Invalid config section: '%s'. Options are: %s"
-            % (section, " ".join(SECTION_SCHEMAS.keys()))
+            f"Invalid config section: '{section}'. Options are: {' '.join(SECTION_SCHEMAS.keys())}"
        )


-def validate(data, schema, filename=None):
+def validate(
+    data: YamlConfigDict, schema: YamlConfigDict, filename: Optional[str] = None
+) -> YamlConfigDict:
     """Validate data read in from a Spack YAML file.

     Arguments:
-        data (dict or list): data read from a Spack YAML file
-        schema (dict or list): jsonschema to validate data
+        data: data read from a Spack YAML file
+        schema: jsonschema to validate data

     This leverages the line information (start_mark, end_mark) stored
     on Spack YAML structures.
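The scope-selection logic in `change_or_add` above falls through three cases: the first scope whose section satisfies `find_fn`, then the first scope with any content for the section, then the highest-priority scope. A standalone sketch of just that selection, using `(scope, section)` pairs shaped like what `matched_config` returns:

    def pick_scope(configs_by_section, find_fn):
        for scope, section in configs_by_section:
            if find_fn(section):
                return scope
        for scope, section in configs_by_section:
            if section:
                return scope
        return configs_by_section[0][0]

    configs = [("user", {}), ("site", {"mine": "https://example.com"}), ("system", {})]
    print(pick_scope(configs, lambda s: "mine" in (s or {})))  # -> site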
@@ -964,7 +1065,9 @@ def validate(data, schema, filename=None):
|
||||
return test_data
|
||||
|
||||
|
||||
def read_config_file(filename, schema=None):
|
||||
def read_config_file(
|
||||
filename: str, schema: Optional[YamlConfigDict] = None
|
||||
) -> Optional[YamlConfigDict]:
|
||||
"""Read a YAML configuration file.
|
||||
|
||||
User can provide a schema for validation. If no schema is provided,
|
||||
@@ -976,17 +1079,17 @@ def read_config_file(filename, schema=None):
|
||||
|
||||
if not os.path.exists(filename):
|
||||
# Ignore nonexistent files.
|
||||
tty.debug("Skipping nonexistent config path {0}".format(filename), level=3)
|
||||
tty.debug(f"Skipping nonexistent config path {filename}", level=3)
|
||||
return None
|
||||
|
||||
elif not os.path.isfile(filename):
|
||||
raise ConfigFileError("Invalid configuration. %s exists but is not a file." % filename)
|
||||
raise ConfigFileError(f"Invalid configuration. {filename} exists but is not a file.")
|
||||
|
||||
elif not os.access(filename, os.R_OK):
|
||||
raise ConfigFileError("Config file is not readable: {0}".format(filename))
|
||||
raise ConfigFileError(f"Config file is not readable: {filename}")
|
||||
|
||||
try:
|
||||
tty.debug("Reading config from file {0}".format(filename))
|
||||
tty.debug(f"Reading config from file {filename}")
|
||||
with open(filename) as f:
|
||||
data = syaml.load_config(f)
|
||||
|
||||
@@ -1004,11 +1107,11 @@ def read_config_file(filename, schema=None):
|
||||
except syaml.SpackYAMLError as e:
|
||||
raise ConfigFileError(str(e)) from e
|
||||
|
||||
except IOError as e:
|
||||
except OSError as e:
|
||||
raise ConfigFileError(f"Error reading configuration file {filename}: {str(e)}") from e
|
||||
|
||||
|
||||
def _override(string):
|
||||
def _override(string: str) -> bool:
|
||||
"""Test if a spack YAML string is an override.
|
||||
|
||||
See ``spack_yaml`` for details. Keys in Spack YAML can end in `::`,
|
||||
@@ -1019,7 +1122,7 @@ def _override(string):
|
||||
return hasattr(string, "override") and string.override
|
||||
|
||||
|
||||
def _append(string):
|
||||
def _append(string: str) -> bool:
|
||||
"""Test if a spack YAML string is an override.
|
||||
|
||||
See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
|
||||
@@ -1033,7 +1136,7 @@ def _append(string):
|
||||
return getattr(string, "append", False)
|
||||
|
||||
|
||||
def _prepend(string):
|
||||
def _prepend(string: str) -> bool:
|
||||
"""Test if a spack YAML string is an override.
|
||||
|
||||
See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
|
||||
@@ -1105,7 +1208,7 @@ def get_valid_type(path):
|
||||
return types[schema_type]()
|
||||
else:
|
||||
return type(None)
|
||||
raise ConfigError("Cannot determine valid type for path '%s'." % path)
|
||||
raise ConfigError(f"Cannot determine valid type for path '{path}'.")
|
||||
|
||||
|
||||
 def remove_yaml(dest, source):

@@ -1233,7 +1336,7 @@ def they_are(t):
     return copy.copy(source)


-def process_config_path(path):
+def process_config_path(path: str) -> List[str]:
     """Process a path argument to config.set() that may contain overrides ('::' or
     trailing ':')

@@ -1246,29 +1349,29 @@ def process_config_path(path):
     """
     result = []
     if path.startswith(":"):
-        raise syaml.SpackYAMLError("Illegal leading `:' in path `{0}'".format(path), "")
+        raise syaml.SpackYAMLError(f"Illegal leading `:' in path `{path}'", "")
     seen_override_in_path = False
     while path:
         front, sep, path = path.partition(":")
         if (sep and not path) or path.startswith(":"):
             if seen_override_in_path:
                 raise syaml.SpackYAMLError(
-                    "Meaningless second override" " indicator `::' in path `{0}'".format(path), ""
+                    f"Meaningless second override indicator `::' in path `{path}'", ""
                 )
             path = path.lstrip(":")
             front = syaml.syaml_str(front)
-            front.override = True
+            front.override = True  # type: ignore[attr-defined]
             seen_override_in_path = True

         elif front.endswith("+"):
             front = front.rstrip("+")
             front = syaml.syaml_str(front)
-            front.prepend = True
+            front.prepend = True  # type: ignore[attr-defined]

         elif front.endswith("-"):
             front = front.rstrip("-")
             front = syaml.syaml_str(front)
-            front.append = True
+            front.append = True  # type: ignore[attr-defined]

         result.append(front)

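The loop above tokenizes a colon-separated path and records each marker on the component it follows. A self-contained re-implementation, with a plain `str` subclass in place of `syaml.syaml_str`, showing where the markers land:

```python
# Standalone re-implementation of the tokenizer above; S stands in for
# syaml.syaml_str so the markers can be attached as attributes.
class S(str):
    pass

def process_config_path(path: str):
    result = []
    seen_override = False
    while path:
        front, sep, path = path.partition(":")
        if (sep and not path) or path.startswith(":"):
            # trailing ':' or '::' marks this component as an override
            if seen_override:
                raise ValueError(f"second override `::' in `{path}'")
            path = path.lstrip(":")
            front = S(front)
            front.override = True
            seen_override = True
        elif front.endswith("+"):
            front = S(front.rstrip("+"))
            front.prepend = True
        elif front.endswith("-"):
            front = S(front.rstrip("-"))
            front.append = True
        result.append(front)
    return result

parts = process_config_path("packages:all::variants")
assert parts == ["packages", "all", "variants"]
assert getattr(parts[1], "override", False)  # '::' marks 'all' as an override
```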
@@ -1288,7 +1391,7 @@ def process_config_path(path):
 #
 # Settings for commands that modify configuration
 #
-def default_modify_scope(section="config"):
+def default_modify_scope(section: str = "config") -> str:
     """Return the config scope that commands should modify by default.

     Commands that modify configuration by default modify the *highest*

@@ -1304,23 +1407,15 @@ def default_modify_scope(section="config"):
     return CONFIG.highest_precedence_non_platform_scope().name


-def default_list_scope():
-    """Return the config scope that is listed by default.
-
-    Commands that list configuration list *all* scopes (merged) by default.
-    """
-    return None

-def _update_in_memory(data, section):
+def _update_in_memory(data: YamlConfigDict, section: str) -> bool:
     """Update the format of the configuration data in memory.

     This function assumes the section is valid (i.e. validation
     is the responsibility of the caller)

     Args:
-        data (dict): configuration data
-        section (str): section of the configuration to update
+        data: configuration data
+        section: section of the configuration to update

     Returns:
         True if the data was changed, False otherwise

@@ -1330,14 +1425,14 @@ def _update_in_memory(data, section):
     return changed


-def ensure_latest_format_fn(section):
+def ensure_latest_format_fn(section: str) -> Callable[[YamlConfigDict], bool]:
     """Return a function that takes as input a dictionary read from
     a configuration file and updates it to the latest format.

     The function returns True if there was any update, False otherwise.

     Args:
-        section (str): section of the configuration e.g. "packages",
+        section: section of the configuration e.g. "packages",
         "config", etc.
     """
     # The line below is based on the fact that every module we need
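`ensure_latest_format_fn` returns a `Callable[[dict], bool]` that mutates a parsed YAML mapping in place and reports whether anything changed. A minimal sketch of that contract; the `old_key`/`new_key` migration is hypothetical, real updaters live in Spack's per-section schema modules:

```python
from typing import Any, Callable, Dict

YamlConfigDict = Dict[str, Any]

def ensure_latest_format_fn(section: str) -> Callable[[YamlConfigDict], bool]:
    # Hypothetical updater: renames a legacy key if present. Only the shape
    # of the returned callable matches Spack's; the migration is invented.
    def update(data: YamlConfigDict) -> bool:
        changed = False
        if section in data and "old_key" in data[section]:
            data[section]["new_key"] = data[section].pop("old_key")
            changed = True
        return changed

    return update

data = {"config": {"old_key": 1}}
assert ensure_latest_format_fn("config")(data) is True   # key was migrated
assert data == {"config": {"new_key": 1}}
assert ensure_latest_format_fn("config")(data) is False  # already current
```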
@@ -1348,7 +1443,9 @@ def ensure_latest_format_fn(section):

 @contextlib.contextmanager
-def use_configuration(*scopes_or_paths):
+def use_configuration(
+    *scopes_or_paths: Union[ConfigScope, str]
+) -> Generator[Configuration, None, None]:
     """Use the configuration scopes passed as arguments within the
     context manager.

@@ -1372,8 +1469,8 @@ def use_configuration(*scopes_or_paths):
     CONFIG = saved_config


-@llnl.util.lang.memoized
-def _config_from(scopes_or_paths):
+@lang.memoized
+def _config_from(scopes_or_paths: List[Union[ConfigScope, str]]) -> Configuration:
     scopes = []
     for scope_or_path in scopes_or_paths:
         # If we have a config scope we are already done

@@ -1383,7 +1480,7 @@ def _config_from(scopes_or_paths):

         # Otherwise we need to construct it
         path = os.path.normpath(scope_or_path)
-        assert os.path.isdir(path), '"{0}" must be a directory'.format(path)
+        assert os.path.isdir(path), f'"{path}" must be a directory'
         name = os.path.basename(path)
         scopes.append(ConfigScope(name, path))

@@ -1391,13 +1488,14 @@ def _config_from(scopes_or_paths):
     return configuration
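`use_configuration` follows the classic save/swap/restore pattern around the module-global `CONFIG`, restoring the saved object on exit so an exception in the body cannot leak the temporary configuration. A sketch of the pattern with a plain dict standing in for the `Configuration` object (the kwargs interface here is invented for the sketch; the real function takes scopes or paths):

```python
import contextlib
from typing import Generator

CONFIG: dict = {"scope": "default"}

@contextlib.contextmanager
def use_configuration(**overrides) -> Generator[dict, None, None]:
    global CONFIG
    saved_config, CONFIG = CONFIG, dict(overrides)  # swap in the new config
    try:
        yield CONFIG
    finally:
        CONFIG = saved_config  # always restore, even if the body raised

with use_configuration(scope="custom") as cfg:
    assert CONFIG is cfg and cfg["scope"] == "custom"
assert CONFIG["scope"] == "default"  # original config is back
```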
-def raw_github_gitlab_url(url):
+def raw_github_gitlab_url(url: str) -> str:
     """Transform a github URL to the raw form to avoid undesirable html.

     Args:
         url: url to be converted to raw form

-    Returns: (str) raw github/gitlab url or the original url
+    Returns:
+        Raw github/gitlab url or the original url
     """
     # Note we rely on GitHub to redirect the 'raw' URL returned here to the
     # actual URL under https://raw.githubusercontent.com/ with '/blob'
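A hedged sketch of the blob-to-raw rewrite the docstring describes; the regex below is illustrative, not Spack's actual implementation, and as the note above says, GitHub is relied on to redirect the `/raw/` form to raw.githubusercontent.com:

```python
import re

# Illustrative blob -> raw rewrite for GitHub URLs; unrecognized URLs are
# returned unchanged, matching the "or the original url" contract.
def raw_github_gitlab_url(url: str) -> str:
    match = re.match(r"(.+github\.com/[^/]+/[^/]+)/blob/(.+)", url)
    if match:
        return f"{match.group(1)}/raw/{match.group(2)}"
    return url

url = "https://github.com/spack/spack/blob/develop/etc/spack/defaults/config.yaml"
assert raw_github_gitlab_url(url) == (
    "https://github.com/spack/spack/raw/develop/etc/spack/defaults/config.yaml"
)
```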
@@ -1450,7 +1548,7 @@ def fetch_remote_configs(url: str, dest_dir: str, skip_existing: bool = True) ->

     def _fetch_file(url):
         raw = raw_github_gitlab_url(url)
-        tty.debug("Reading config from url {0}".format(raw))
+        tty.debug(f"Reading config from url {raw}")
         return web_util.fetch_url_text(raw, dest_dir=dest_dir)

     if not url:

@@ -1466,8 +1564,8 @@ def _fetch_file(url):
     basename = os.path.basename(config_url)
     if skip_existing and basename in existing_files:
         tty.warn(
-            "Will not fetch configuration from {0} since a version already"
-            "exists in {1}".format(config_url, dest_dir)
+            f"Will not fetch configuration from {config_url} since a "
+            f"version already exists in {dest_dir}"
         )
         path = os.path.join(dest_dir, basename)
     else:

@@ -1479,7 +1577,7 @@ def _fetch_file(url):
     if paths:
         return dest_dir if len(paths) > 1 else paths[0]

-    raise ConfigFileError("Cannot retrieve configuration (yaml) from {0}".format(url))
+    raise ConfigFileError(f"Cannot retrieve configuration (yaml) from {url}")

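Worth noting: the old two-part warning string concatenated without a space ("alreadyexists"), which the f-string rewrite fixes. The `skip_existing` branch itself just reuses a previously fetched file with the same basename instead of downloading again; a standalone sketch of that decision (`resolve_fetch` is an invented helper name, not a Spack function):

```python
import os

# Sketch of the skip_existing decision in fetch_remote_configs: a basename
# that was already downloaded is reused rather than fetched again.
def resolve_fetch(config_url: str, dest_dir: str, existing_files: set,
                  skip_existing: bool = True) -> tuple:
    """Return (path, needs_fetch) for one remote config URL."""
    basename = os.path.basename(config_url)
    if skip_existing and basename in existing_files:
        return os.path.join(dest_dir, basename), False  # reuse, do not fetch
    return os.path.join(dest_dir, basename), True

path, fetch = resolve_fetch(
    "https://example.com/configs/packages.yaml", "/tmp/cfg", {"packages.yaml"}
)
assert fetch is False  # duplicate basename: skipped
```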
 class ConfigError(SpackError):

@@ -1497,7 +1595,13 @@ class ConfigFileError(ConfigError):
 class ConfigFormatError(ConfigError):
     """Raised when a configuration format does not match its schema."""

-    def __init__(self, validation_error, data, filename=None, line=None):
+    def __init__(
+        self,
+        validation_error,
+        data: YamlConfigDict,
+        filename: Optional[str] = None,
+        line: Optional[int] = None,
+    ) -> None:
         # spack yaml has its own file/line marks -- try to find them
         # we prioritize these over the inputs
         self.validation_error = validation_error

@@ -1511,11 +1615,11 @@ def __init__(self, validation_error, data, filename=None, line=None):
         # construct location
         location = "<unknown file>"
         if filename:
-            location = "%s" % filename
+            location = f"{filename}"
         if line is not None:
-            location += ":%d" % line
+            location += f":{line:d}"

-        message = "%s: %s" % (location, validation_error.message)
+        message = f"{location}: {validation_error.message}"
         super().__init__(message)

     def _get_mark(self, validation_error, data):

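The location prefix built above renders as `file:line` when both are known and falls back to `<unknown file>` otherwise. The same few lines, runnable outside Spack:

```python
# Quick check of the location-string construction in ConfigFormatError.
filename, line = "packages.yaml", 12

location = "<unknown file>"
if filename:
    location = f"{filename}"
if line is not None:
    location += f":{line:d}"

assert location == "packages.yaml:12"
# With neither filename nor line, messages begin with "<unknown file>: ..."
```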
@@ -227,7 +227,7 @@ def read(path, apply_updates):
     if apply_updates and compilers:
         for compiler in compilers:
             try:
-                spack.compilers.add_compilers_to_config([compiler])
+                spack.compilers.add_compilers_to_config([compiler], init_config=False)
             except Exception:
                 warnings.warn(
                     f"Could not add compiler {str(compiler.spec)}: "
Some files were not shown because too many files have changed in this diff.