Compare commits
310 Commits
isolate-ut
...
develop-20
Author | SHA1 | Date | |
---|---|---|---|
![]() |
bc4ecccfbf | ||
![]() |
9ee4876eb2 | ||
![]() |
d96f8efb9c | ||
![]() |
a2dc11acd3 | ||
![]() |
2f0df0131c | ||
![]() |
dd8941abc9 | ||
![]() |
b341030a0f | ||
![]() |
1d6cea6af2 | ||
![]() |
327a7a4031 | ||
![]() |
6ac75f47e8 | ||
![]() |
843346ce1b | ||
![]() |
23f03966b4 | ||
![]() |
4540980337 | ||
![]() |
ec9d08e71e | ||
![]() |
397c066464 | ||
![]() |
4d1b5d6a88 | ||
![]() |
0cae943b5c | ||
![]() |
78c6c607db | ||
![]() |
0da1fae709 | ||
![]() |
94fc2314f1 | ||
![]() |
05761de8c7 | ||
![]() |
ecdf3ff297 | ||
![]() |
ea7e3e4f9f | ||
![]() |
8371bb4e19 | ||
![]() |
0a5f2fc94d | ||
![]() |
45b2c207db | ||
![]() |
e7f897f959 | ||
![]() |
1aaab97a16 | ||
![]() |
3053e701c0 | ||
![]() |
20572fb87b | ||
![]() |
7e2e063979 | ||
![]() |
16e27ba4a6 | ||
![]() |
2fda288cc5 | ||
![]() |
9986652b27 | ||
![]() |
fd46923216 | ||
![]() |
bb2975b7f1 | ||
![]() |
1168f19e60 | ||
![]() |
5d50ad3941 | ||
![]() |
a43156a861 | ||
![]() |
ec2729706b | ||
![]() |
494d3f9002 | ||
![]() |
4f8b856145 | ||
![]() |
0eca79e7e4 | ||
![]() |
f245bde772 | ||
![]() |
4aee067bb0 | ||
![]() |
cc25a0e561 | ||
![]() |
3f063153f0 | ||
![]() |
aa350a4ed1 | ||
![]() |
e36bee41a0 | ||
![]() |
138d0c7a13 | ||
![]() |
a688479564 | ||
![]() |
5ead4c2d56 | ||
![]() |
2e18fbbdeb | ||
![]() |
02eafeee03 | ||
![]() |
812a43621b | ||
![]() |
0fe338b526 | ||
![]() |
3dc02e55e6 | ||
![]() |
7023edb37c | ||
![]() |
f1fdaca345 | ||
![]() |
d4454e54dc | ||
![]() |
969718d176 | ||
![]() |
0a9179fddb | ||
![]() |
b5b0a76991 | ||
![]() |
59b39f3eba | ||
![]() |
7a0c4e8017 | ||
![]() |
1ddf4ee6ba | ||
![]() |
12d0507cb7 | ||
![]() |
cf99912352 | ||
![]() |
9723fe88f5 | ||
![]() |
2439ff56a5 | ||
![]() |
2ef8d09fc7 | ||
![]() |
e5e767b300 | ||
![]() |
1c6b38f36d | ||
![]() |
091cd47caa | ||
![]() |
1ebf1a0c6c | ||
![]() |
56761649a2 | ||
![]() |
6a19cf1b42 | ||
![]() |
ef4274ed2e | ||
![]() |
88b8fc63ef | ||
![]() |
639a6a6897 | ||
![]() |
af96fef1da | ||
![]() |
7550a41660 | ||
![]() |
ffd2a34d9e | ||
![]() |
6a74a82e19 | ||
![]() |
ebb7c5ac8f | ||
![]() |
14c7bfe9ce | ||
![]() |
ed52b505d4 | ||
![]() |
b111064e22 | ||
![]() |
17d47accf9 | ||
![]() |
d7fb298a6b | ||
![]() |
107ea768ab | ||
![]() |
8797dd35f7 | ||
![]() |
0596a46cd9 | ||
![]() |
9d406463d4 | ||
![]() |
86906bf5b3 | ||
![]() |
03ddccbc93 | ||
![]() |
12db37906b | ||
![]() |
b158a15754 | ||
![]() |
b82f78003c | ||
![]() |
49616d3020 | ||
![]() |
8467f8ae8a | ||
![]() |
5b6137d91a | ||
![]() |
b7edcbecd7 | ||
![]() |
5ccbe68f16 | ||
![]() |
9fe4cef89e | ||
![]() |
165c6cef08 | ||
![]() |
0efd5287c4 | ||
![]() |
b1ab01280a | ||
![]() |
ab84876e2c | ||
![]() |
e2d5be83e7 | ||
![]() |
85cdf37d3b | ||
![]() |
06521b44b6 | ||
![]() |
1e5325eea0 | ||
![]() |
0995a29c5c | ||
![]() |
133d6e2656 | ||
![]() |
36117444aa | ||
![]() |
330a9a7c9a | ||
![]() |
0dc3fc2d21 | ||
![]() |
a972314fa6 | ||
![]() |
16d1ed3591 | ||
![]() |
5c25f16df2 | ||
![]() |
b3ccaa81a7 | ||
![]() |
a0041731a3 | ||
![]() |
a690b8c27c | ||
![]() |
a1fa862c3f | ||
![]() |
80f31829a8 | ||
![]() |
84436f10ba | ||
![]() |
660485709d | ||
![]() |
251dce05c9 | ||
![]() |
ecd05fdfb4 | ||
![]() |
9ffcf36444 | ||
![]() |
07258a7c80 | ||
![]() |
395e53a5e0 | ||
![]() |
77c331c753 | ||
![]() |
ee5481a861 | ||
![]() |
f7ec061c64 | ||
![]() |
7cb70ff4b1 | ||
![]() |
4a661f3255 | ||
![]() |
7037240879 | ||
![]() |
0e96dfaeef | ||
![]() |
a0a2cd6a1a | ||
![]() |
170c05bebb | ||
![]() |
bdf68b7ac0 | ||
![]() |
c176de94e2 | ||
![]() |
f63dbbe75d | ||
![]() |
a0c7b10c76 | ||
![]() |
2dc3bf0164 | ||
![]() |
9bf6e05d02 | ||
![]() |
cd283846af | ||
![]() |
03625c1c95 | ||
![]() |
f01774f1d4 | ||
![]() |
965860d1f8 | ||
![]() |
c4baf4e199 | ||
![]() |
dd82227ae7 | ||
![]() |
a9028630a5 | ||
![]() |
789c85ed8b | ||
![]() |
cf9d36fd64 | ||
![]() |
ef7ce46649 | ||
![]() |
334a50662f | ||
![]() |
d68e73d006 | ||
![]() |
7d7f097295 | ||
![]() |
37cdcc7172 | ||
![]() |
0a40bb72e8 | ||
![]() |
24b6edac89 | ||
![]() |
3e7acf3e61 | ||
![]() |
ede36512e7 | ||
![]() |
e06b169720 | ||
![]() |
7ed968d42c | ||
![]() |
c673b9245c | ||
![]() |
27c0dab5ca | ||
![]() |
b82bd8e6b6 | ||
![]() |
5351382501 | ||
![]() |
8c29e90fa9 | ||
![]() |
045f398f3d | ||
![]() |
6986e70877 | ||
![]() |
4d59e746fd | ||
![]() |
f6de34f9db | ||
![]() |
0f0adb71d0 | ||
![]() |
4ec958c5c6 | ||
![]() |
2aa07fa557 | ||
![]() |
239d343588 | ||
![]() |
44604708ad | ||
![]() |
f0109e4afe | ||
![]() |
b8f90e1bdc | ||
![]() |
8b9064e5e4 | ||
![]() |
ce79785c10 | ||
![]() |
af378c7f31 | ||
![]() |
cf50bfb7c2 | ||
![]() |
620e090ff5 | ||
![]() |
c4d86a9c2e | ||
![]() |
3b74b894c7 | ||
![]() |
3fa8afc036 | ||
![]() |
60628075cb | ||
![]() |
9e4fab277b | ||
![]() |
5588e328f7 | ||
![]() |
93a1fc90c9 | ||
![]() |
7297721e78 | ||
![]() |
eb57d96ea9 | ||
![]() |
ce09642922 | ||
![]() |
cd88eb1ed0 | ||
![]() |
826df84baf | ||
![]() |
0a4b365a7d | ||
![]() |
a2ed4704e7 | ||
![]() |
28b49d5d2f | ||
![]() |
16bb4c360a | ||
![]() |
cfd58bdafe | ||
![]() |
53493ceab1 | ||
![]() |
64cd429cc8 | ||
![]() |
525809632e | ||
![]() |
a6c32c80ab | ||
![]() |
57ad848f47 | ||
![]() |
15623d8077 | ||
![]() |
c352db7645 | ||
![]() |
5d999d0e4f | ||
![]() |
694a1ff340 | ||
![]() |
4ec451cfed | ||
![]() |
a77eca7f88 | ||
![]() |
14ac2b063a | ||
![]() |
edf4d6659d | ||
![]() |
6531fbf425 | ||
![]() |
0a6045eadf | ||
![]() |
5722a13af0 | ||
![]() |
9f1223e7a3 | ||
![]() |
5beef28444 | ||
![]() |
e618a93f3d | ||
![]() |
3f0ec5c580 | ||
![]() |
14392efc6d | ||
![]() |
d7406aaaa5 | ||
![]() |
5a7e691ae2 | ||
![]() |
b9f63ab40b | ||
![]() |
4417b1f9ee | ||
![]() |
04f14166cb | ||
![]() |
223a54098e | ||
![]() |
ea505e2d26 | ||
![]() |
e2b51e01be | ||
![]() |
a04ee77f77 | ||
![]() |
bb03ce7281 | ||
![]() |
31640652c7 | ||
![]() |
0ff0e8944e | ||
![]() |
a877d812d0 | ||
![]() |
24a59ffd36 | ||
![]() |
57f46f0375 | ||
![]() |
7d45e132a6 | ||
![]() |
e7ac676417 | ||
![]() |
94ba152ef5 | ||
![]() |
5404a5bb82 | ||
![]() |
b522d8f610 | ||
![]() |
2a57c11d28 | ||
![]() |
1aa3a641ee | ||
![]() |
6feba1590c | ||
![]() |
58a7912435 | ||
![]() |
03ae2eb223 | ||
![]() |
013f0d3a13 | ||
![]() |
3e68aa0b2f | ||
![]() |
1da0d0342b | ||
![]() |
6f7d91aebf | ||
![]() |
071c74d185 | ||
![]() |
51435d6d69 | ||
![]() |
8ce110e069 | ||
![]() |
90aee11c33 | ||
![]() |
4fc73bd7f3 | ||
![]() |
f7fc4b201d | ||
![]() |
84999b6996 | ||
![]() |
b0f193071d | ||
![]() |
d1c3374ccb | ||
![]() |
cba8ba0466 | ||
![]() |
d50f8d7b19 | ||
![]() |
969fbbfb5a | ||
![]() |
1cd5397b12 | ||
![]() |
1829dbd7b6 | ||
![]() |
9e3b231e6f | ||
![]() |
5911a677d4 | ||
![]() |
bb60bb4f7a | ||
![]() |
ddec75315e | ||
![]() |
8bcb1f8766 | ||
![]() |
5a0ac4ba94 | ||
![]() |
673689d53b | ||
![]() |
ace8e17f02 | ||
![]() |
eb9c63541a | ||
![]() |
b9f4d9f6fc | ||
![]() |
eda3522ce8 | ||
![]() |
3cefd73fcc | ||
![]() |
3547bcb517 | ||
![]() |
53b528f649 | ||
![]() |
798770f9e5 | ||
![]() |
4a920243a0 | ||
![]() |
8727195b84 | ||
![]() |
456f2ca40f | ||
![]() |
b4258aaa25 | ||
![]() |
5d9647544a | ||
![]() |
1fdb6a3e7e | ||
![]() |
7c77b3a4b2 | ||
![]() |
eb4b8292b6 | ||
![]() |
16bc58ea49 | ||
![]() |
6028ce8bc1 | ||
![]() |
349e7e4c37 | ||
![]() |
a982118c1f | ||
![]() |
40d12ed7e2 | ||
![]() |
9e0720207a | ||
![]() |
88e738c343 | ||
![]() |
8bbc2e2ade | ||
![]() |
1509e54435 | ||
![]() |
ca164d6619 | ||
![]() |
a632576231 | ||
![]() |
70b16cfb59 | ||
![]() |
1d89d4dc13 | ||
![]() |
bc8a0f56ed | ||
![]() |
4e09396f8a | ||
![]() |
0d488c6e4f | ||
![]() |
50e76bc3d3 |
2
.github/workflows/audit.yaml
vendored
2
.github/workflows/audit.yaml
vendored
@@ -23,7 +23,7 @@ jobs:
|
||||
operating_system: ["ubuntu-latest", "macos-latest"]
|
||||
steps:
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
|
||||
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
|
||||
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
|
||||
with:
|
||||
python-version: ${{inputs.python_version}}
|
||||
- name: Install Python packages
|
||||
|
2
.github/workflows/bootstrap.yml
vendored
2
.github/workflows/bootstrap.yml
vendored
@@ -159,7 +159,7 @@ jobs:
|
||||
brew install cmake bison@2.7 tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
|
||||
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
|
||||
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
|
||||
with:
|
||||
python-version: "3.12"
|
||||
- name: Bootstrap clingo
|
||||
|
2
.github/workflows/build-containers.yml
vendored
2
.github/workflows/build-containers.yml
vendored
@@ -57,7 +57,7 @@ jobs:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
|
||||
|
||||
- uses: docker/metadata-action@e6428a5c4e294a61438ed7f43155db912025b6b3
|
||||
- uses: docker/metadata-action@9dc751fe249ad99385a2583ee0d084c400eee04e
|
||||
id: docker_meta
|
||||
with:
|
||||
images: |
|
||||
|
2
.github/workflows/nightly-win-builds.yml
vendored
2
.github/workflows/nightly-win-builds.yml
vendored
@@ -17,7 +17,7 @@ jobs:
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
|
||||
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
|
4
.github/workflows/style/requirements.txt
vendored
4
.github/workflows/style/requirements.txt
vendored
@@ -1,7 +1,7 @@
|
||||
black==23.11.0
|
||||
black==23.12.0
|
||||
clingo==5.6.2
|
||||
flake8==6.1.0
|
||||
isort==5.12.0
|
||||
isort==5.13.2
|
||||
mypy==1.7.1
|
||||
types-six==1.16.21.9
|
||||
vermin==1.6.0
|
||||
|
8
.github/workflows/unit_tests.yaml
vendored
8
.github/workflows/unit_tests.yaml
vendored
@@ -54,7 +54,7 @@ jobs:
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
|
||||
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install System packages
|
||||
@@ -101,7 +101,7 @@ jobs:
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
|
||||
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- name: Install System packages
|
||||
@@ -159,7 +159,7 @@ jobs:
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
|
||||
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- name: Install System packages
|
||||
@@ -194,7 +194,7 @@ jobs:
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
|
||||
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install Python packages
|
||||
|
4
.github/workflows/valid-style.yml
vendored
4
.github/workflows/valid-style.yml
vendored
@@ -19,7 +19,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
|
||||
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
|
||||
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: 'pip'
|
||||
@@ -38,7 +38,7 @@ jobs:
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
|
||||
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: 'pip'
|
||||
|
6
.github/workflows/windows_python.yml
vendored
6
.github/workflows/windows_python.yml
vendored
@@ -18,7 +18,7 @@ jobs:
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
|
||||
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
@@ -42,7 +42,7 @@ jobs:
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
|
||||
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
@@ -66,7 +66,7 @@ jobs:
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
|
||||
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
|
44
README.md
44
README.md
@@ -1,13 +1,34 @@
|
||||
# <img src="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo.svg" width="64" valign="middle" alt="Spack"/> Spack
|
||||
<div align="left">
|
||||
|
||||
[](https://github.com/spack/spack/actions)
|
||||
[](https://github.com/spack/spack/actions/workflows/bootstrap.yml)
|
||||
[](https://codecov.io/gh/spack/spack)
|
||||
[](https://github.com/spack/spack/actions/workflows/build-containers.yml)
|
||||
[](https://spack.readthedocs.io)
|
||||
[](https://github.com/psf/black)
|
||||
[](https://slack.spack.io)
|
||||
[](https://matrix.to/#/#spack-space:matrix.org)
|
||||
<h2>
|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: dark)" srcset="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo-white-text.svg" width="250">
|
||||
<source media="(prefers-color-scheme: light)" srcset="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo-text.svg" width="250">
|
||||
<img alt="Spack" src="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo-text.svg" width="250">
|
||||
</picture>
|
||||
|
||||
<br>
|
||||
<br clear="all">
|
||||
|
||||
<a href="https://github.com/spack/spack/actions/workflows/ci.yml"><img src="https://github.com/spack/spack/workflows/ci/badge.svg" alt="CI Status"></a>
|
||||
<a href="https://github.com/spack/spack/actions/workflows/bootstrapping.yml"><img src="https://github.com/spack/spack/actions/workflows/bootstrap.yml/badge.svg" alt="Bootstrap Status"></a>
|
||||
<a href="https://github.com/spack/spack/actions/workflows/build-containers.yml"><img src="https://github.com/spack/spack/actions/workflows/build-containers.yml/badge.svg" alt="Containers Status"></a>
|
||||
<a href="https://spack.readthedocs.io"><img src="https://readthedocs.org/projects/spack/badge/?version=latest" alt="Documentation Status"></a>
|
||||
<a href="https://codecov.io/gh/spack/spack"><img src="https://codecov.io/gh/spack/spack/branch/develop/graph/badge.svg" alt="Code coverage"/></a>
|
||||
<a href="https://slack.spack.io"><img src="https://slack.spack.io/badge.svg" alt="Slack"/></a>
|
||||
<a href="https://matrix.to/#/#spack-space:matrix.org"><img src="https://img.shields.io/matrix/spack-space%3Amatrix.org?label=matrix" alt="Matrix"/></a>
|
||||
|
||||
</h2>
|
||||
|
||||
**[Getting Started] • [Config] • [Community] • [Contributing] • [Packaging Guide]**
|
||||
|
||||
[Getting Started]: https://spack.readthedocs.io/en/latest/getting_started.html
|
||||
[Config]: https://spack.readthedocs.io/en/latest/configuration.html
|
||||
[Community]: #community
|
||||
[Contributing]: https://spack.readthedocs.io/en/latest/contribution_guide.html
|
||||
[Packaging Guide]: https://spack.readthedocs.io/en/latest/packaging_guide.html
|
||||
|
||||
</div>
|
||||
|
||||
Spack is a multi-platform package manager that builds and installs
|
||||
multiple versions and configurations of software. It works on Linux,
|
||||
@@ -66,10 +87,11 @@ Resources:
|
||||
* **Matrix space**: [#spack-space:matrix.org](https://matrix.to/#/#spack-space:matrix.org):
|
||||
[bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
|
||||
* [**Github Discussions**](https://github.com/spack/spack/discussions):
|
||||
not just for discussions, but also Q&A.
|
||||
* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack)
|
||||
for Q&A and discussions. Note the pinned discussions for announcements.
|
||||
* **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
|
||||
`@mention` us!
|
||||
* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack):
|
||||
only for announcements. Please use other venues for discussions.
|
||||
|
||||
Contributing
|
||||
------------------------
|
||||
|
@@ -153,7 +153,43 @@ keyring, and trusting all downloaded keys.
|
||||
List of popular build caches
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
* `Extreme-scale Scientific Software Stack (E4S) <https://e4s-project.github.io/>`_: `build cache <https://oaciss.uoregon.edu/e4s/inventory.html>`_
|
||||
* `Extreme-scale Scientific Software Stack (E4S) <https://e4s-project.github.io/>`_: `build cache <https://oaciss.uoregon.edu/e4s/inventory.html>`_'
|
||||
|
||||
-------------------
|
||||
Build cache signing
|
||||
-------------------
|
||||
|
||||
By default, Spack will add a cryptographic signature to each package pushed to
|
||||
a build cache, and verifies the signature when installing from a build cache.
|
||||
|
||||
Keys for signing can be managed with the :ref:`spack gpg <cmd-spack-gpg>` command,
|
||||
as well as ``spack buildcache keys`` as mentioned above.
|
||||
|
||||
You can disable signing when pushing with ``spack buildcache push --unsigned``,
|
||||
and disable verification when installing from any build cache with
|
||||
``spack install --no-check-signature``.
|
||||
|
||||
Alternatively, signing and verification can be enabled or disabled on a per build cache
|
||||
basis:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack mirror add --signed <name> <url> # enable signing and verification
|
||||
$ spack mirror add --unsigned <name> <url> # disable signing and verification
|
||||
|
||||
$ spack mirror set --signed <name> # enable signing and verification for an existing mirror
|
||||
$ spack mirror set --unsigned <name> # disable signing and verification for an existing mirror
|
||||
|
||||
Or you can directly edit the ``mirrors.yaml`` configuration file:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
mirrors:
|
||||
<name>:
|
||||
url: <url>
|
||||
signed: false # disable signing and verification
|
||||
|
||||
See also :ref:`mirrors`.
|
||||
|
||||
----------
|
||||
Relocation
|
||||
@@ -251,87 +287,13 @@ To significantly speed up Spack in GitHub Actions, binaries can be cached in
|
||||
GitHub Packages. This service is an OCI registry that can be linked to a GitHub
|
||||
repository.
|
||||
|
||||
A typical workflow is to include a ``spack.yaml`` environment in your repository
|
||||
that specifies the packages to install, the target architecture, and the build
|
||||
cache to use under ``mirrors``:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
specs:
|
||||
- python@3.11
|
||||
config:
|
||||
install_tree:
|
||||
root: /opt/spack
|
||||
padded_length: 128
|
||||
packages:
|
||||
all:
|
||||
require: target=x86_64_v2
|
||||
mirrors:
|
||||
local-buildcache: oci://ghcr.io/<organization>/<repository>
|
||||
|
||||
A GitHub action can then be used to install the packages and push them to the
|
||||
build cache:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
name: Install Spack packages
|
||||
|
||||
on: push
|
||||
|
||||
env:
|
||||
SPACK_COLOR: always
|
||||
|
||||
jobs:
|
||||
example:
|
||||
runs-on: ubuntu-22.04
|
||||
permissions:
|
||||
packages: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Checkout Spack
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
repository: spack/spack
|
||||
path: spack
|
||||
|
||||
- name: Setup Spack
|
||||
run: echo "$PWD/spack/bin" >> "$GITHUB_PATH"
|
||||
|
||||
- name: Concretize
|
||||
run: spack -e . concretize
|
||||
|
||||
- name: Install
|
||||
run: spack -e . install --no-check-signature
|
||||
|
||||
- name: Run tests
|
||||
run: ./my_view/bin/python3 -c 'print("hello world")'
|
||||
|
||||
- name: Push to buildcache
|
||||
run: |
|
||||
spack -e . mirror set --oci-username ${{ github.actor }} --oci-password "${{ secrets.GITHUB_TOKEN }}" local-buildcache
|
||||
spack -e . buildcache push --base-image ubuntu:22.04 --unsigned --update-index local-buildcache
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
The first time this action runs, it will build the packages from source and
|
||||
push them to the build cache. Subsequent runs will pull the binaries from the
|
||||
build cache. The concretizer will ensure that prebuilt binaries are favored
|
||||
over source builds.
|
||||
|
||||
The build cache entries appear in the GitHub Packages section of your repository,
|
||||
and contain instructions for pulling and running them with ``docker`` or ``podman``.
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Using Spack's public build cache for GitHub Actions
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Spack offers a public build cache for GitHub Actions with a set of common packages,
|
||||
which lets you get started quickly. See the following resources for more information:
|
||||
|
||||
* `spack/github-actions-buildcache <https://github.com/spack/github-actions-buildcache>`_
|
||||
* `spack/setup-spack <https://github.com/spack/setup-spack>`_ for setting up Spack in GitHub
|
||||
Actions
|
||||
* `spack/github-actions-buildcache <https://github.com/spack/github-actions-buildcache>`_ for
|
||||
more details on the public build cache
|
||||
|
||||
.. _cmd-spack-buildcache:
|
||||
|
||||
|
@@ -90,7 +90,7 @@ and optimizers do require a paid license. In Spack, they are packaged as:
|
||||
TODO: Confirm and possible change(!) the scope of MPI components (runtime
|
||||
vs. devel) in current (and previous?) *cluster/professional/composer*
|
||||
editions, i.e., presence in downloads, possibly subject to license
|
||||
coverage(!); see `disussion in PR #4300
|
||||
coverage(!); see `discussion in PR #4300
|
||||
<https://github.com/spack/spack/pull/4300#issuecomment-305582898>`_. [NB:
|
||||
An "mpi" subdirectory is not indicative of the full MPI SDK being present
|
||||
(i.e., ``mpicc``, ..., and header files). The directory may just as well
|
||||
|
@@ -9,34 +9,96 @@
|
||||
Container Images
|
||||
================
|
||||
|
||||
Spack :ref:`environments` are a great tool to create container images, but
|
||||
preparing one that is suitable for production requires some more boilerplate
|
||||
than just:
|
||||
Spack :ref:`environments` can easily be turned into container images. This page
|
||||
outlines two ways in which this can be done:
|
||||
|
||||
1. By installing the environment on the host system, and copying the installations
|
||||
into the container image. This approach does not require any tools like Docker
|
||||
or Singularity to be installed.
|
||||
2. By generating a Docker or Singularity recipe that can be used to build the
|
||||
container image. In this approach, Spack builds the software inside the
|
||||
container runtime, not on the host system.
|
||||
|
||||
The first approach is easiest if you already have an installed environment,
|
||||
the second approach gives more control over the container image.
|
||||
|
||||
---------------------------
|
||||
From existing installations
|
||||
---------------------------
|
||||
|
||||
If you already have a Spack environment installed on your system, you can
|
||||
share the binaries as an OCI compatible container image. To get started you
|
||||
just have to configure and OCI registry and run ``spack buildcache push``.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
# Create and install an environment in the current directory
|
||||
spack env create -d .
|
||||
spack -e . add pkg-a pkg-b
|
||||
spack -e . install
|
||||
|
||||
# Configure the registry
|
||||
spack -e . mirror add --oci-username ... --oci-password ... container-registry oci://example.com/name/image
|
||||
|
||||
# Push the image
|
||||
spack -e . buildcache push --update-index --base-image ubuntu:22.04 --tag my_env container-registry
|
||||
|
||||
The resulting container image can then be run as follows:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ docker run -it example.com/name/image:my_env
|
||||
|
||||
The image generated by Spack consists of the specified base image with each package from the
|
||||
environment as a separate layer on top. The image is minimal by construction, it only contains the
|
||||
environment roots and its runtime dependencies.
|
||||
|
||||
.. note::
|
||||
|
||||
When using registries like GHCR and Docker Hub, the ``--oci-password`` flag is not
|
||||
the password for your account, but a personal access token you need to generate separately.
|
||||
|
||||
The specified ``--base-image`` should have a libc that is compatible with the host system.
|
||||
For example if your host system is Ubuntu 20.04, you can use ``ubuntu:20.04``, ``ubuntu:22.04``
|
||||
or newer: the libc in the container image must be at least the version of the host system,
|
||||
assuming ABI compatibility. It is also perfectly fine to use a completely different
|
||||
Linux distribution as long as the libc is compatible.
|
||||
|
||||
For convenience, Spack also turns the OCI registry into a :ref:`build cache <binary_caches_oci>`,
|
||||
so that future ``spack install`` of the environment will simply pull the binaries from the
|
||||
registry instead of doing source builds. The flag ``--update-index`` is needed to make Spack
|
||||
take the build cache into account when concretizing.
|
||||
|
||||
.. note::
|
||||
|
||||
When generating container images in CI, the approach above is recommended when CI jobs
|
||||
already run in a sandboxed environment. You can simply use ``spack`` directly
|
||||
in the CI job and push the resulting image to a registry. Subsequent CI jobs should
|
||||
run faster because Spack can install from the same registry instead of rebuilding from
|
||||
sources.
|
||||
|
||||
---------------------------------------------
|
||||
Generating recipes for Docker and Singularity
|
||||
---------------------------------------------
|
||||
|
||||
Apart from copying existing installations into container images, Spack can also
|
||||
generate recipes for container images. This is useful if you want to run Spack
|
||||
itself in a sandboxed environment instead of on the host system.
|
||||
|
||||
Since recipes need a little bit more boilerplate than
|
||||
|
||||
.. code-block:: docker
|
||||
|
||||
COPY spack.yaml /environment
|
||||
RUN spack -e /environment install
|
||||
|
||||
Additional actions may be needed to minimize the size of the
|
||||
container, or to update the system software that is installed in the base
|
||||
image, or to set up a proper entrypoint to run the image. These tasks are
|
||||
usually both necessary and repetitive, so Spack comes with a command
|
||||
to generate recipes for container images starting from a ``spack.yaml``.
|
||||
Spack provides a command to generate customizable recipes for container images. Customizations
|
||||
include minimizing the size of the image, installing packages in the base image using the system
|
||||
package manager, and setting up a proper entrypoint to run the image.
|
||||
|
||||
.. seealso::
|
||||
|
||||
This page is a reference for generating recipes to build container images.
|
||||
It means that your environment is built from scratch inside the container
|
||||
runtime.
|
||||
|
||||
Since v0.21, Spack can also create container images from existing package installations
|
||||
on your host system. See :ref:`binary_caches_oci` for more information on
|
||||
that topic.
|
||||
|
||||
--------------------
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
A Quick Introduction
|
||||
--------------------
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Consider having a Spack environment like the following:
|
||||
|
||||
@@ -47,8 +109,8 @@ Consider having a Spack environment like the following:
|
||||
- gromacs+mpi
|
||||
- mpich
|
||||
|
||||
Producing a ``Dockerfile`` from it is as simple as moving to the directory
|
||||
where the ``spack.yaml`` file is stored and giving the following command:
|
||||
Producing a ``Dockerfile`` from it is as simple as changing directories to
|
||||
where the ``spack.yaml`` file is stored and running the following command:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
@@ -114,9 +176,9 @@ configuration are discussed in details in the sections below.
|
||||
|
||||
.. _container_spack_images:
|
||||
|
||||
--------------------------
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
Spack Images on Docker Hub
|
||||
--------------------------
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Docker images with Spack preinstalled and ready to be used are
|
||||
built when a release is tagged, or nightly on ``develop``. The images
|
||||
@@ -186,9 +248,9 @@ by Spack use them as default base images for their ``build`` stage,
|
||||
even though handles to use custom base images provided by users are
|
||||
available to accommodate complex use cases.
|
||||
|
||||
---------------------------------
|
||||
Creating Images From Environments
|
||||
---------------------------------
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
Configuring the Container Recipe
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Any Spack Environment can be used for the automatic generation of container
|
||||
recipes. Sensible defaults are provided for things like the base image or the
|
||||
@@ -229,18 +291,18 @@ under the ``container`` attribute of environments:
|
||||
|
||||
A detailed description of the options available can be found in the :ref:`container_config_options` section.
|
||||
|
||||
-------------------
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
Setting Base Images
|
||||
-------------------
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The ``images`` subsection is used to select both the image where
|
||||
Spack builds the software and the image where the built software
|
||||
is installed. This attribute can be set in different ways and
|
||||
which one to use depends on the use case at hand.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
""""""""""""""""""""""""""""""""""""""""
|
||||
Use Official Spack Images From Dockerhub
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
""""""""""""""""""""""""""""""""""""""""
|
||||
|
||||
To generate a recipe that uses an official Docker image from the
|
||||
Spack organization to build the software and the corresponding official OS image
|
||||
@@ -445,9 +507,9 @@ responsibility to ensure that:
|
||||
Therefore we don't recommend its use in cases that can be otherwise
|
||||
covered by the simplified mode shown first.
|
||||
|
||||
----------------------------
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
Singularity Definition Files
|
||||
----------------------------
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
In addition to producing recipes in ``Dockerfile`` format Spack can produce
|
||||
Singularity Definition Files by just changing the value of the ``format``
|
||||
@@ -468,9 +530,9 @@ attribute:
|
||||
The minimum version of Singularity required to build a SIF (Singularity Image Format)
|
||||
image from the recipes generated by Spack is ``3.5.3``.
|
||||
|
||||
------------------------------
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
Extending the Jinja2 Templates
|
||||
------------------------------
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The Dockerfile and the Singularity definition file that Spack can generate are based on
|
||||
a few Jinja2 templates that are rendered according to the environment being containerized.
|
||||
@@ -591,9 +653,9 @@ The recipe that gets generated contains the two extra instruction that we added
|
||||
|
||||
.. _container_config_options:
|
||||
|
||||
-----------------------
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
Configuration Reference
|
||||
-----------------------
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The tables below describe all the configuration options that are currently supported
|
||||
to customize the generation of container recipes:
|
||||
@@ -690,13 +752,13 @@ to customize the generation of container recipes:
|
||||
- Description string
|
||||
- No
|
||||
|
||||
--------------
|
||||
~~~~~~~~~~~~~~
|
||||
Best Practices
|
||||
--------------
|
||||
~~~~~~~~~~~~~~
|
||||
|
||||
^^^
|
||||
"""
|
||||
MPI
|
||||
^^^
|
||||
"""
|
||||
Due to the dependency on Fortran for OpenMPI, which is the spack default
|
||||
implementation, consider adding ``gfortran`` to the ``apt-get install`` list.
|
||||
|
||||
@@ -707,9 +769,9 @@ For execution on HPC clusters, it can be helpful to import the docker
|
||||
image into Singularity in order to start a program with an *external*
|
||||
MPI. Otherwise, also add ``openssh-server`` to the ``apt-get install`` list.
|
||||
|
||||
^^^^
|
||||
""""
|
||||
CUDA
|
||||
^^^^
|
||||
""""
|
||||
Starting from CUDA 9.0, Nvidia provides minimal CUDA images based on
|
||||
Ubuntu. Please see `their instructions <https://hub.docker.com/r/nvidia/cuda/>`_.
|
||||
Avoid double-installing CUDA by adding, e.g.
|
||||
@@ -728,9 +790,9 @@ to your ``spack.yaml``.
|
||||
Users will either need ``nvidia-docker`` or e.g. Singularity to *execute*
|
||||
device kernels.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
"""""""""""""""""""""""""
|
||||
Docker on Windows and OSX
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
"""""""""""""""""""""""""
|
||||
|
||||
On Mac OS and Windows, docker runs on a hypervisor that is not allocated much
|
||||
memory by default, and some spack packages may fail to build due to lack of
|
||||
|
@@ -9,46 +9,42 @@
|
||||
Custom Extensions
|
||||
=================
|
||||
|
||||
*Spack extensions* permit you to extend Spack capabilities by deploying your
|
||||
*Spack extensions* allow you to extend Spack capabilities by deploying your
|
||||
own custom commands or logic in an arbitrary location on your filesystem.
|
||||
This might be extremely useful e.g. to develop and maintain a command whose purpose is
|
||||
too specific to be considered for reintegration into the mainline or to
|
||||
evolve a command through its early stages before starting a discussion to merge
|
||||
it upstream.
|
||||
|
||||
From Spack's point of view an extension is any path in your filesystem which
|
||||
respects a prescribed naming and layout for files:
|
||||
respects the following naming and layout for files:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
spack-scripting/ # The top level directory must match the format 'spack-{extension_name}'
|
||||
├── pytest.ini # Optional file if the extension ships its own tests
|
||||
├── scripting # Folder that may contain modules that are needed for the extension commands
|
||||
│ └── cmd # Folder containing extension commands
|
||||
│ └── filter.py # A new command that will be available
|
||||
├── tests # Tests for this extension
|
||||
│ ├── cmd # Folder containing extension commands
|
||||
│ │ └── filter.py # A new command that will be available
|
||||
│ └── functions.py # Module with internal details
|
||||
└── tests # Tests for this extension
|
||||
│ ├── conftest.py
|
||||
│ └── test_filter.py
|
||||
└── templates # Templates that may be needed by the extension
|
||||
|
||||
In the example above the extension named *scripting* adds an additional command (``filter``)
|
||||
and unit tests to verify its behavior. The code for this example can be
|
||||
obtained by cloning the corresponding git repository:
|
||||
In the example above, the extension is named *scripting*. It adds an additional command
|
||||
(``spack filter``) and unit tests to verify its behavior.
|
||||
|
||||
.. TODO: write an ad-hoc "hello world" extension and make it part of the spack organization
|
||||
The extension can import any core Spack module in its implementation. When loaded by
|
||||
the ``spack`` command, the extension itself is imported as a Python package in the
|
||||
``spack.extensions`` namespace. In the example above, since the extension is named
|
||||
"scripting", the corresponding Python module is ``spack.extensions.scripting``.
|
||||
|
||||
The code for this example extension can be obtained by cloning the corresponding git repository:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ cd ~/
|
||||
$ mkdir tmp && cd tmp
|
||||
$ git clone https://github.com/alalazo/spack-scripting.git
|
||||
Cloning into 'spack-scripting'...
|
||||
remote: Counting objects: 11, done.
|
||||
remote: Compressing objects: 100% (7/7), done.
|
||||
remote: Total 11 (delta 0), reused 11 (delta 0), pack-reused 0
|
||||
Receiving objects: 100% (11/11), done.
|
||||
|
||||
As you can see by inspecting the sources, Python modules that are part of the extension
|
||||
can import any core Spack module.
|
||||
$ git -C /tmp clone https://github.com/spack/spack-scripting.git
|
||||
|
||||
---------------------------------
|
||||
Configure Spack to Use Extensions
|
||||
@@ -61,7 +57,7 @@ paths to ``config.yaml``. In the case of our example this means ensuring that:
|
||||
|
||||
config:
|
||||
extensions:
|
||||
- ~/tmp/spack-scripting
|
||||
- /tmp/spack-scripting
|
||||
|
||||
is part of your configuration file. Once this is setup any command that the extension provides
|
||||
will be available from the command line:
|
||||
@@ -86,37 +82,32 @@ will be available from the command line:
|
||||
--implicit select specs that are not installed or were installed implicitly
|
||||
--output OUTPUT where to dump the result
|
||||
|
||||
The corresponding unit tests can be run giving the appropriate options
|
||||
to ``spack unit-test``:
|
||||
The corresponding unit tests can be run giving the appropriate options to ``spack unit-test``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack unit-test --extension=scripting
|
||||
|
||||
============================================================== test session starts ===============================================================
|
||||
platform linux2 -- Python 2.7.15rc1, pytest-3.2.5, py-1.4.34, pluggy-0.4.0
|
||||
rootdir: /home/mculpo/tmp/spack-scripting, inifile: pytest.ini
|
||||
========================================== test session starts ===========================================
|
||||
platform linux -- Python 3.11.5, pytest-7.4.3, pluggy-1.3.0
|
||||
rootdir: /home/culpo/github/spack-scripting
|
||||
configfile: pytest.ini
|
||||
testpaths: tests
|
||||
plugins: xdist-3.5.0
|
||||
collected 5 items
|
||||
|
||||
tests/test_filter.py ...XX
|
||||
============================================================ short test summary info =============================================================
|
||||
XPASS tests/test_filter.py::test_filtering_specs[flags3-specs3-expected3]
|
||||
XPASS tests/test_filter.py::test_filtering_specs[flags4-specs4-expected4]
|
||||
tests/test_filter.py ..... [100%]
|
||||
|
||||
=========================================================== slowest 20 test durations ============================================================
|
||||
3.74s setup tests/test_filter.py::test_filtering_specs[flags0-specs0-expected0]
|
||||
0.17s call tests/test_filter.py::test_filtering_specs[flags3-specs3-expected3]
|
||||
0.16s call tests/test_filter.py::test_filtering_specs[flags2-specs2-expected2]
|
||||
0.15s call tests/test_filter.py::test_filtering_specs[flags1-specs1-expected1]
|
||||
0.13s call tests/test_filter.py::test_filtering_specs[flags4-specs4-expected4]
|
||||
0.08s call tests/test_filter.py::test_filtering_specs[flags0-specs0-expected0]
|
||||
0.04s teardown tests/test_filter.py::test_filtering_specs[flags4-specs4-expected4]
|
||||
0.00s setup tests/test_filter.py::test_filtering_specs[flags4-specs4-expected4]
|
||||
0.00s setup tests/test_filter.py::test_filtering_specs[flags3-specs3-expected3]
|
||||
0.00s setup tests/test_filter.py::test_filtering_specs[flags1-specs1-expected1]
|
||||
0.00s setup tests/test_filter.py::test_filtering_specs[flags2-specs2-expected2]
|
||||
0.00s teardown tests/test_filter.py::test_filtering_specs[flags2-specs2-expected2]
|
||||
0.00s teardown tests/test_filter.py::test_filtering_specs[flags1-specs1-expected1]
|
||||
0.00s teardown tests/test_filter.py::test_filtering_specs[flags0-specs0-expected0]
|
||||
0.00s teardown tests/test_filter.py::test_filtering_specs[flags3-specs3-expected3]
|
||||
====================================================== 3 passed, 2 xpassed in 4.51 seconds =======================================================
|
||||
========================================== slowest 30 durations ==========================================
|
||||
2.31s setup tests/test_filter.py::test_filtering_specs[kwargs0-specs0-expected0]
|
||||
0.57s call tests/test_filter.py::test_filtering_specs[kwargs2-specs2-expected2]
|
||||
0.56s call tests/test_filter.py::test_filtering_specs[kwargs4-specs4-expected4]
|
||||
0.54s call tests/test_filter.py::test_filtering_specs[kwargs3-specs3-expected3]
|
||||
0.54s call tests/test_filter.py::test_filtering_specs[kwargs1-specs1-expected1]
|
||||
0.48s call tests/test_filter.py::test_filtering_specs[kwargs0-specs0-expected0]
|
||||
0.01s setup tests/test_filter.py::test_filtering_specs[kwargs4-specs4-expected4]
|
||||
0.01s setup tests/test_filter.py::test_filtering_specs[kwargs2-specs2-expected2]
|
||||
0.01s setup tests/test_filter.py::test_filtering_specs[kwargs1-specs1-expected1]
|
||||
0.01s setup tests/test_filter.py::test_filtering_specs[kwargs3-specs3-expected3]
|
||||
|
||||
(5 durations < 0.005s hidden. Use -vv to show these durations.)
|
||||
=========================================== 5 passed in 5.06s ============================================
|
||||
|
@@ -111,3 +111,28 @@ CUDA is split into fewer components and is simpler to specify:
|
||||
prefix: /opt/cuda/cuda-11.0.2/
|
||||
|
||||
where ``/opt/cuda/cuda-11.0.2/lib/`` contains ``libcudart.so``.
|
||||
|
||||
|
||||
|
||||
-----------------------------------
|
||||
Using an External OpenGL API
|
||||
-----------------------------------
|
||||
Depending on whether we have a graphics card or not, we may choose to use OSMesa or GLX to implement the OpenGL API.
|
||||
|
||||
If a graphics card is unavailable, OSMesa is recommended and can typically be built with Spack.
|
||||
However, if we prefer to utilize the system GLX tailored to our graphics card, we need to declare it as an external. Here's how to do it:
|
||||
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
libglx:
|
||||
require: [opengl]
|
||||
opengl:
|
||||
buildable: false
|
||||
externals:
|
||||
- prefix: /usr/
|
||||
spec: opengl@4.6
|
||||
|
||||
Note that prefix has to be the root of both the libraries and the headers, using is /usr not the path the the lib.
|
||||
To know which spec for opengl is available use ``cd /usr/include/GL && grep -Ri gl_version``.
|
||||
|
@@ -97,6 +97,35 @@ Each package version and compiler listed in an external should
|
||||
have entries in Spack's packages and compiler configuration, even
|
||||
though the package and compiler may not ever be built.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Extra attributes for external packages
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Sometimes external packages require additional attributes to be used
|
||||
effectively. This information can be defined on a per-package basis
|
||||
and stored in the ``extra_attributes`` section of the external package
|
||||
configuration. In addition to per-package information, this section
|
||||
can be used to define environment modifications to be performed
|
||||
whenever the package is used. For example, if an external package is
|
||||
built without ``rpath`` support, it may require ``LD_LIBRARY_PATH``
|
||||
settings to find its dependencies. This could be configured as
|
||||
follows:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
mpich:
|
||||
externals:
|
||||
- spec: mpich@3.3 %clang@12.0.0 +hwloc
|
||||
prefix: /path/to/mpich
|
||||
extra_attributes:
|
||||
environment:
|
||||
prepend_path:
|
||||
LD_LIBRARY_PATH: /path/to/hwloc/lib64
|
||||
|
||||
See :ref:`configuration_environment_variables` for more information on
|
||||
how to configure environment modifications in Spack config files.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Prevent packages from being built from sources
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
@@ -5284,7 +5284,7 @@ installed example.
|
||||
example = which(self.prefix.bin.example)
|
||||
example()
|
||||
|
||||
Output showing the identification of each test part after runnig the tests
|
||||
Output showing the identification of each test part after running the tests
|
||||
is illustrated below.
|
||||
|
||||
.. code-block:: console
|
||||
@@ -5781,7 +5781,7 @@ with those implemented in the package itself.
|
||||
* - `Cxx
|
||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cxx>`_
|
||||
- Compiles and runs several ``hello`` programs
|
||||
* - `Fortan
|
||||
* - `Fortran
|
||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/fortran>`_
|
||||
- Compiles and runs ``hello`` programs (``F`` and ``f90``)
|
||||
* - `Mpi
|
||||
|
@@ -7,7 +7,7 @@ docutils==0.20.1
|
||||
pygments==2.17.2
|
||||
urllib3==2.1.0
|
||||
pytest==7.4.3
|
||||
isort==5.12.0
|
||||
black==23.11.0
|
||||
isort==5.13.2
|
||||
black==23.12.0
|
||||
flake8==6.1.0
|
||||
mypy==1.7.1
|
||||
|
@@ -142,7 +142,7 @@ Reputational Key
|
||||
----------------
|
||||
|
||||
The Reputational Key is the public facing key used to sign complete groups of
|
||||
development and release packages. Only one key pair exsits in this class of
|
||||
development and release packages. Only one key pair exists in this class of
|
||||
keys. In contrast to the Intermediate CI Key the Reputational Key *should* be
|
||||
used to verify package integrity. At the end of develop and release pipeline a
|
||||
final pipeline job pulls down all signed package metadata built by the pipeline,
|
||||
@@ -272,7 +272,7 @@ Internal Implementation
|
||||
|
||||
The technical implementation of the pipeline signing process includes components
|
||||
defined in Amazon Web Services, the Kubernetes cluster, at affilicated
|
||||
institutions, and the GitLab/GitLab Runner deployment. We present the techincal
|
||||
institutions, and the GitLab/GitLab Runner deployment. We present the technical
|
||||
implementation in two interdependent sections. The first addresses how secrets
|
||||
are managed through the lifecycle of a develop or release pipeline. The second
|
||||
section describes how Gitlab Runner and pipelines are configured and managed to
|
||||
@@ -295,7 +295,7 @@ infrastructure.
|
||||
-----------------------
|
||||
|
||||
Multiple intermediate CI signing keys exist, one Intermediate CI Key for jobs
|
||||
run in AWS, and one key for each affiliated institution (e.g. Univerity of
|
||||
run in AWS, and one key for each affiliated institution (e.g. University of
|
||||
Oregon). Here we describe how the Intermediate CI Key is managed in AWS:
|
||||
|
||||
The Intermediate CI Key (including the Signing Intermediate CI Private Key is
|
||||
@@ -305,7 +305,7 @@ contains an ASCII-armored export of just the *public* components of the
|
||||
Reputational Key. This secret also contains the *public* components of each of
|
||||
the affiliated institutions' Intermediate CI Key. These are potentially needed
|
||||
to verify dependent packages which may have been found in the public mirror or
|
||||
built by a protected job running on an affiliated institution's infrastrcuture
|
||||
built by a protected job running on an affiliated institution's infrastructure
|
||||
in an earlier stage of the pipeline.
|
||||
|
||||
Procedurally the ``spack-intermediate-ci-signing-key`` secret is used in
|
||||
|
@@ -1047,9 +1047,9 @@ def __bool__(self):
|
||||
"""Whether any exceptions were handled."""
|
||||
return bool(self.exceptions)
|
||||
|
||||
def forward(self, context: str) -> "GroupedExceptionForwarder":
|
||||
def forward(self, context: str, base: type = BaseException) -> "GroupedExceptionForwarder":
|
||||
"""Return a contextmanager which extracts tracebacks and prefixes a message."""
|
||||
return GroupedExceptionForwarder(context, self)
|
||||
return GroupedExceptionForwarder(context, self, base)
|
||||
|
||||
def _receive_forwarded(self, context: str, exc: Exception, tb: List[str]):
|
||||
self.exceptions.append((context, exc, tb))
|
||||
@@ -1072,15 +1072,18 @@ class GroupedExceptionForwarder:
|
||||
"""A contextmanager to capture exceptions and forward them to a
|
||||
GroupedExceptionHandler."""
|
||||
|
||||
def __init__(self, context: str, handler: GroupedExceptionHandler):
|
||||
def __init__(self, context: str, handler: GroupedExceptionHandler, base: type):
|
||||
self._context = context
|
||||
self._handler = handler
|
||||
self._base = base
|
||||
|
||||
def __enter__(self):
|
||||
return None
|
||||
|
||||
def __exit__(self, exc_type, exc_value, tb):
|
||||
if exc_value is not None:
|
||||
if not issubclass(exc_type, self._base):
|
||||
return False
|
||||
self._handler._receive_forwarded(self._context, exc_value, traceback.format_tb(tb))
|
||||
|
||||
# Suppress any exception from being re-raised:
|
||||
|
@@ -726,13 +726,46 @@ def _unknown_variants_in_directives(pkgs, error_cls):
|
||||
|
||||
|
||||
@package_directives
|
||||
def _unknown_variants_in_dependencies(pkgs, error_cls):
|
||||
"""Report unknown dependencies and wrong variants for dependencies"""
|
||||
def _issues_in_depends_on_directive(pkgs, error_cls):
|
||||
"""Reports issues with 'depends_on' directives.
|
||||
|
||||
Issues might be unknown dependencies, unknown variants or variant values, or declaration
|
||||
of nested dependencies.
|
||||
"""
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
filename = spack.repo.PATH.filename_for_package_name(pkg_name)
|
||||
for dependency_name, dependency_data in pkg_cls.dependencies.items():
|
||||
# Check if there are nested dependencies declared. We don't want directives like:
|
||||
#
|
||||
# depends_on('foo+bar ^fee+baz')
|
||||
#
|
||||
# but we'd like to have two dependencies listed instead.
|
||||
for when, dependency_edge in dependency_data.items():
|
||||
dependency_spec = dependency_edge.spec
|
||||
nested_dependencies = dependency_spec.dependencies()
|
||||
if nested_dependencies:
|
||||
summary = (
|
||||
f"{pkg_name}: invalid nested dependency "
|
||||
f"declaration '{str(dependency_spec)}'"
|
||||
)
|
||||
details = [
|
||||
f"split depends_on('{str(dependency_spec)}', when='{str(when)}') "
|
||||
f"into {len(nested_dependencies) + 1} directives",
|
||||
f"in {filename}",
|
||||
]
|
||||
errors.append(error_cls(summary=summary, details=details))
|
||||
|
||||
for s in (dependency_spec, when):
|
||||
if s.virtual and s.variants:
|
||||
summary = f"{pkg_name}: virtual dependency cannot have variants"
|
||||
details = [
|
||||
f"remove variants from '{str(s)}' in depends_on directive",
|
||||
f"in {filename}",
|
||||
]
|
||||
errors.append(error_cls(summary=summary, details=details))
|
||||
|
||||
# No need to analyze virtual packages
|
||||
if spack.repo.PATH.is_virtual(dependency_name):
|
||||
continue
|
||||
|
@@ -25,7 +25,7 @@
|
||||
import warnings
|
||||
from contextlib import closing, contextmanager
|
||||
from gzip import GzipFile
|
||||
from typing import Dict, List, NamedTuple, Optional, Set, Tuple
|
||||
from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple
|
||||
from urllib.error import HTTPError, URLError
|
||||
|
||||
import llnl.util.filesystem as fsys
|
||||
@@ -1605,14 +1605,14 @@ def _get_valid_spec_file(path: str, max_supported_layout: int) -> Tuple[Dict, in
|
||||
return spec_dict, layout_version
|
||||
|
||||
|
||||
def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
|
||||
def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=None):
|
||||
"""
|
||||
Download binary tarball for given package into stage area, returning
|
||||
path to downloaded tarball if successful, None otherwise.
|
||||
|
||||
Args:
|
||||
spec (spack.spec.Spec): Concrete spec
|
||||
unsigned (bool): Whether or not to require signed binaries
|
||||
unsigned: if ``True`` or ``False`` override the mirror signature verification defaults
|
||||
mirrors_for_spec (list): Optional list of concrete specs and mirrors
|
||||
obtained by calling binary_distribution.get_mirrors_for_spec().
|
||||
These will be checked in order first before looking in other
|
||||
@@ -1633,7 +1633,9 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
|
||||
"signature_verified": "true-if-binary-pkg-was-already-verified"
|
||||
}
|
||||
"""
|
||||
configured_mirrors = spack.mirror.MirrorCollection(binary=True).values()
|
||||
configured_mirrors: Iterable[spack.mirror.Mirror] = spack.mirror.MirrorCollection(
|
||||
binary=True
|
||||
).values()
|
||||
if not configured_mirrors:
|
||||
tty.die("Please add a spack mirror to allow download of pre-compiled packages.")
|
||||
|
||||
@@ -1651,8 +1653,16 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
|
||||
# mirror for the spec twice though.
|
||||
try_first = [i["mirror_url"] for i in mirrors_for_spec] if mirrors_for_spec else []
|
||||
try_next = [i.fetch_url for i in configured_mirrors if i.fetch_url not in try_first]
|
||||
mirror_urls = try_first + try_next
|
||||
|
||||
mirrors = try_first + try_next
|
||||
# TODO: turn `mirrors_for_spec` into a list of Mirror instances, instead of doing that here.
|
||||
def fetch_url_to_mirror(url):
|
||||
for mirror in configured_mirrors:
|
||||
if mirror.fetch_url == url:
|
||||
return mirror
|
||||
return spack.mirror.Mirror(url)
|
||||
|
||||
mirrors = [fetch_url_to_mirror(url) for url in mirror_urls]
|
||||
|
||||
tried_to_verify_sigs = []
|
||||
|
||||
@@ -1661,14 +1671,17 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
|
||||
# we remove support for deprecated spec formats and buildcache layouts.
|
||||
for try_signed in (True, False):
|
||||
for mirror in mirrors:
|
||||
# Override mirror's default if
|
||||
currently_unsigned = unsigned if unsigned is not None else not mirror.signed
|
||||
|
||||
# If it's an OCI index, do things differently, since we cannot compose URLs.
|
||||
parsed = urllib.parse.urlparse(mirror)
|
||||
fetch_url = mirror.fetch_url
|
||||
|
||||
# TODO: refactor this to some "nice" place.
|
||||
if parsed.scheme == "oci":
|
||||
ref = spack.oci.image.ImageReference.from_string(mirror[len("oci://") :]).with_tag(
|
||||
spack.oci.image.default_tag(spec)
|
||||
)
|
||||
if fetch_url.startswith("oci://"):
|
||||
ref = spack.oci.image.ImageReference.from_string(
|
||||
fetch_url[len("oci://") :]
|
||||
).with_tag(spack.oci.image.default_tag(spec))
|
||||
|
||||
# Fetch the manifest
|
||||
try:
|
||||
@@ -1705,7 +1718,7 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
|
||||
except InvalidMetadataFile as e:
|
||||
tty.warn(
|
||||
f"Ignoring binary package for {spec.name}/{spec.dag_hash()[:7]} "
|
||||
f"from {mirror} due to invalid metadata file: {e}"
|
||||
f"from {fetch_url} due to invalid metadata file: {e}"
|
||||
)
|
||||
local_specfile_stage.destroy()
|
||||
continue
|
||||
@@ -1727,13 +1740,16 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
|
||||
"tarball_stage": tarball_stage,
|
||||
"specfile_stage": local_specfile_stage,
|
||||
"signature_verified": False,
|
||||
"signature_required": not currently_unsigned,
|
||||
}
|
||||
|
||||
else:
|
||||
ext = "json.sig" if try_signed else "json"
|
||||
specfile_path = url_util.join(mirror, BUILD_CACHE_RELATIVE_PATH, specfile_prefix)
|
||||
specfile_path = url_util.join(
|
||||
fetch_url, BUILD_CACHE_RELATIVE_PATH, specfile_prefix
|
||||
)
|
||||
specfile_url = f"{specfile_path}.{ext}"
|
||||
spackfile_url = url_util.join(mirror, BUILD_CACHE_RELATIVE_PATH, tarball)
|
||||
spackfile_url = url_util.join(fetch_url, BUILD_CACHE_RELATIVE_PATH, tarball)
|
||||
local_specfile_stage = try_fetch(specfile_url)
|
||||
if local_specfile_stage:
|
||||
local_specfile_path = local_specfile_stage.save_filename
|
||||
@@ -1746,21 +1762,21 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
|
||||
except InvalidMetadataFile as e:
|
||||
tty.warn(
|
||||
f"Ignoring binary package for {spec.name}/{spec.dag_hash()[:7]} "
|
||||
f"from {mirror} due to invalid metadata file: {e}"
|
||||
f"from {fetch_url} due to invalid metadata file: {e}"
|
||||
)
|
||||
local_specfile_stage.destroy()
|
||||
continue
|
||||
|
||||
if try_signed and not unsigned:
|
||||
if try_signed and not currently_unsigned:
|
||||
# If we found a signed specfile at the root, try to verify
|
||||
# the signature immediately. We will not download the
|
||||
# tarball if we could not verify the signature.
|
||||
tried_to_verify_sigs.append(specfile_url)
|
||||
signature_verified = try_verify(local_specfile_path)
|
||||
if not signature_verified:
|
||||
tty.warn("Failed to verify: {0}".format(specfile_url))
|
||||
tty.warn(f"Failed to verify: {specfile_url}")
|
||||
|
||||
if unsigned or signature_verified or not try_signed:
|
||||
if currently_unsigned or signature_verified or not try_signed:
|
||||
# We will download the tarball in one of three cases:
|
||||
# 1. user asked for --no-check-signature
|
||||
# 2. user didn't ask for --no-check-signature, but we
|
||||
@@ -1783,6 +1799,7 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
|
||||
"tarball_stage": tarball_stage,
|
||||
"specfile_stage": local_specfile_stage,
|
||||
"signature_verified": signature_verified,
|
||||
"signature_required": not currently_unsigned,
|
||||
}
|
||||
|
||||
local_specfile_stage.destroy()
|
||||
@@ -1981,7 +1998,7 @@ def is_backup_file(file):
|
||||
relocate.relocate_text(text_names, prefix_to_prefix_text)
|
||||
|
||||
|
||||
def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum):
|
||||
def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool, remote_checksum):
|
||||
stagepath = os.path.dirname(filename)
|
||||
spackfile_name = tarball_name(spec, ".spack")
|
||||
spackfile_path = os.path.join(stagepath, spackfile_name)
|
||||
@@ -2001,7 +2018,7 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
|
||||
else:
|
||||
raise ValueError("Cannot find spec file for {0}.".format(extract_to))
|
||||
|
||||
if not unsigned:
|
||||
if signature_required:
|
||||
if os.path.exists("%s.asc" % specfile_path):
|
||||
suppress = config.get("config:suppress_gpg_warnings", False)
|
||||
try:
|
||||
@@ -2050,7 +2067,7 @@ def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
|
||||
m.linkname = m.linkname[result.end() :]
|
||||
|
||||
|
||||
def extract_tarball(spec, download_result, unsigned=False, force=False, timer=timer.NULL_TIMER):
|
||||
def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
|
||||
"""
|
||||
extract binary tarball for given package into install area
|
||||
"""
|
||||
@@ -2076,7 +2093,8 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti
|
||||
bchecksum = spec_dict["binary_cache_checksum"]
|
||||
|
||||
filename = download_result["tarball_stage"].save_filename
|
||||
signature_verified = download_result["signature_verified"]
|
||||
signature_verified: bool = download_result["signature_verified"]
|
||||
signature_required: bool = download_result["signature_required"]
|
||||
tmpdir = None
|
||||
|
||||
if layout_version == 0:
|
||||
@@ -2085,7 +2103,9 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti
|
||||
# and another tarball containing the actual install tree.
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
try:
|
||||
tarfile_path = _extract_inner_tarball(spec, filename, tmpdir, unsigned, bchecksum)
|
||||
tarfile_path = _extract_inner_tarball(
|
||||
spec, filename, tmpdir, signature_required, bchecksum
|
||||
)
|
||||
except Exception as e:
|
||||
_delete_staged_downloads(download_result)
|
||||
shutil.rmtree(tmpdir)
|
||||
@@ -2098,9 +2118,10 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti
|
||||
# the tarball.
|
||||
tarfile_path = filename
|
||||
|
||||
if not unsigned and not signature_verified:
|
||||
if signature_required and not signature_verified:
|
||||
raise UnsignedPackageException(
|
||||
"To install unsigned packages, use the --no-check-signature option."
|
||||
"To install unsigned packages, use the --no-check-signature option, "
|
||||
"or configure the mirror with signed: false."
|
||||
)
|
||||
|
||||
# compute the sha256 checksum of the tarball
|
||||
@@ -2213,7 +2234,7 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
|
||||
# don't print long padded paths while extracting/relocating binaries
|
||||
with spack.util.path.filter_padding():
|
||||
tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
|
||||
extract_tarball(spec, download_result, unsigned, force)
|
||||
extract_tarball(spec, download_result, force)
|
||||
spack.hooks.post_install(spec, False)
|
||||
spack.store.STORE.db.add(spec, spack.store.STORE.layout)
|
||||
|
||||
|
@@ -16,6 +16,7 @@
|
||||
import llnl.util.filesystem as fs
|
||||
from llnl.util import tty
|
||||
|
||||
import spack.platforms
|
||||
import spack.store
|
||||
import spack.util.environment
|
||||
import spack.util.executable
|
||||
@@ -206,17 +207,19 @@ def _root_spec(spec_str: str) -> str:
|
||||
"""Add a proper compiler and target to a spec used during bootstrapping.
|
||||
|
||||
Args:
|
||||
spec_str (str): spec to be bootstrapped. Must be without compiler and target.
|
||||
spec_str: spec to be bootstrapped. Must be without compiler and target.
|
||||
"""
|
||||
# Add a proper compiler hint to the root spec. We use GCC for
|
||||
# everything but MacOS and Windows.
|
||||
if str(spack.platforms.host()) == "darwin":
|
||||
# Add a compiler requirement to the root spec.
|
||||
platform = str(spack.platforms.host())
|
||||
if platform == "darwin":
|
||||
spec_str += " %apple-clang"
|
||||
elif str(spack.platforms.host()) == "windows":
|
||||
elif platform == "windows":
|
||||
# TODO (johnwparent): Remove version constraint when clingo patch is up
|
||||
spec_str += " %msvc@:19.37"
|
||||
else:
|
||||
elif platform == "linux":
|
||||
spec_str += " %gcc"
|
||||
elif platform == "freebsd":
|
||||
spec_str += " %clang"
|
||||
|
||||
target = archspec.cpu.host().family
|
||||
spec_str += f" target={target}"
|
||||
|
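The refactor above replaces an if/else chain keyed on repeated `spack.platforms.host()` calls with a single platform lookup, and makes the supported platforms explicit (adding FreeBSD). A sketch of the resulting mapping, with a hard-coded target standing in for `archspec.cpu.host().family`:

```python
# Hypothetical stand-alone version of the compiler-hint logic; the real code
# queries spack.platforms.host() and archspec for the target family.
COMPILER_BY_PLATFORM = {
    "darwin": "%apple-clang",
    "windows": "%msvc@:19.37",  # version pinned until the clingo patch lands
    "linux": "%gcc",
    "freebsd": "%clang",
}

def root_spec(spec_str: str, platform: str, target: str = "x86_64") -> str:
    compiler = COMPILER_BY_PLATFORM.get(platform)
    if compiler:
        spec_str += f" {compiler}"
    return spec_str + f" target={target}"

assert root_spec("clingo-bootstrap", "linux") == "clingo-bootstrap %gcc target=x86_64"
```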
@@ -386,7 +386,7 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
|
||||
exception_handler = GroupedExceptionHandler()
|
||||
|
||||
for current_config in bootstrapping_sources():
|
||||
with exception_handler.forward(current_config["name"]):
|
||||
with exception_handler.forward(current_config["name"], Exception):
|
||||
source_is_enabled_or_raise(current_config)
|
||||
current_bootstrapper = create_bootstrapper(current_config)
|
||||
if current_bootstrapper.try_import(module, abstract_spec):
|
||||
@@ -441,7 +441,7 @@ def ensure_executables_in_path_or_raise(
|
||||
exception_handler = GroupedExceptionHandler()
|
||||
|
||||
for current_config in bootstrapping_sources():
|
||||
with exception_handler.forward(current_config["name"]):
|
||||
with exception_handler.forward(current_config["name"], Exception):
|
||||
source_is_enabled_or_raise(current_config)
|
||||
current_bootstrapper = create_bootstrapper(current_config)
|
||||
if current_bootstrapper.try_search_path(executables, abstract_spec):
|
||||
|
@@ -19,7 +19,6 @@
|
||||
import spack.tengine
|
||||
import spack.util.cpus
|
||||
import spack.util.executable
|
||||
from spack.environment import depfile
|
||||
|
||||
from ._common import _root_spec
|
||||
from .config import root_path, spec_for_current_python, store_path
|
||||
@@ -86,12 +85,9 @@ def __init__(self) -> None:
|
||||
super().__init__(self.environment_root())
|
||||
|
||||
def update_installations(self) -> None:
|
||||
"""Update the installations of this environment.
|
||||
|
||||
The update is done using a depfile on Linux and macOS, and using the ``install_all``
|
||||
method of environments on Windows.
|
||||
"""
|
||||
with tty.SuppressOutput(msg_enabled=False, warn_enabled=False):
|
||||
"""Update the installations of this environment."""
|
||||
log_enabled = tty.is_debug() or tty.is_verbose()
|
||||
with tty.SuppressOutput(msg_enabled=log_enabled, warn_enabled=log_enabled):
|
||||
specs = self.concretize()
|
||||
if specs:
|
||||
colorized_specs = [
|
||||
@@ -100,11 +96,9 @@ def update_installations(self) -> None:
|
||||
]
|
||||
tty.msg(f"[BOOTSTRAPPING] Installing dependencies ({', '.join(colorized_specs)})")
|
||||
self.write(regenerate=False)
|
||||
if sys.platform == "win32":
|
||||
with tty.SuppressOutput(msg_enabled=log_enabled, warn_enabled=log_enabled):
|
||||
self.install_all()
|
||||
else:
|
||||
self._install_with_depfile()
|
||||
self.write(regenerate=True)
|
||||
self.write(regenerate=True)
|
||||
|
||||
def update_syspath_and_environ(self) -> None:
|
||||
"""Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to
|
||||
@@ -122,25 +116,6 @@ def update_syspath_and_environ(self) -> None:
|
||||
+ [str(x) for x in self.pythonpaths()]
|
||||
)
|
||||
|
||||
def _install_with_depfile(self) -> None:
|
||||
model = depfile.MakefileModel.from_env(self)
|
||||
template = spack.tengine.make_environment().get_template(
|
||||
os.path.join("depfile", "Makefile")
|
||||
)
|
||||
makefile = self.environment_root() / "Makefile"
|
||||
makefile.write_text(template.render(model.to_dict()))
|
||||
make = spack.util.executable.which("make")
|
||||
kwargs = {}
|
||||
if not tty.is_debug():
|
||||
kwargs = {"output": os.devnull, "error": os.devnull}
|
||||
make(
|
||||
"-C",
|
||||
str(self.environment_root()),
|
||||
"-j",
|
||||
str(spack.util.cpus.determine_number_of_jobs(parallel=True)),
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
def _write_spack_yaml_file(self) -> None:
|
||||
tty.msg(
|
||||
"[BOOTSTRAPPING] Spack has missing dependencies, creating a bootstrapping environment"
|
||||
|
@@ -66,7 +66,6 @@ def _core_requirements() -> List[RequiredResponseType]:
|
||||
_core_system_exes = {
|
||||
"make": _missing("make", "required to build software from sources"),
|
||||
"patch": _missing("patch", "required to patch source code before building"),
|
||||
"bash": _missing("bash", "required for Spack compiler wrapper"),
|
||||
"tar": _missing("tar", "required to manage code archives"),
|
||||
"gzip": _missing("gzip", "required to compress/decompress code archives"),
|
||||
"unzip": _missing("unzip", "required to compress/decompress code archives"),
|
||||
|
@@ -1032,6 +1032,11 @@ def get_env_modifications(self) -> EnvironmentModifications:
|
||||
if id(spec) in self.nodes_in_subdag:
|
||||
pkg.setup_dependent_run_environment(run_env_mods, spec)
|
||||
pkg.setup_run_environment(run_env_mods)
|
||||
|
||||
external_env = (dspec.extra_attributes or {}).get("environment", {})
|
||||
if external_env:
|
||||
run_env_mods.extend(spack.schema.environment.parse(external_env))
|
||||
|
||||
if self.context == Context.BUILD:
|
||||
# Don't let the runtime environment of compiler-like dependencies leak into the
|
||||
# build env
|
||||
|
@@ -9,6 +9,7 @@
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.build_environment
|
||||
import spack.builder
|
||||
|
||||
from .cmake import CMakeBuilder, CMakePackage
|
||||
@@ -285,6 +286,19 @@ def initconfig_hardware_entries(self):
|
||||
def std_initconfig_entries(self):
|
||||
cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
|
||||
cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
|
||||
cmake_rpaths_env = spack.build_environment.get_rpaths(self.pkg)
|
||||
cmake_rpaths_path = ";".join(cmake_rpaths_env)
|
||||
complete_rpath_list = cmake_rpaths_path
|
||||
if "SPACK_COMPILER_EXTRA_RPATHS" in os.environ:
|
||||
spack_extra_rpaths_env = os.environ["SPACK_COMPILER_EXTRA_RPATHS"]
|
||||
spack_extra_rpaths_path = spack_extra_rpaths_env.replace(os.pathsep, ";")
|
||||
complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_extra_rpaths_path)
|
||||
|
||||
if "SPACK_COMPILER_IMPLICIT_RPATHS" in os.environ:
|
||||
spack_implicit_rpaths_env = os.environ["SPACK_COMPILER_IMPLICIT_RPATHS"]
|
||||
spack_implicit_rpaths_path = spack_implicit_rpaths_env.replace(os.pathsep, ";")
|
||||
complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_implicit_rpaths_path)
|
||||
|
||||
return [
|
||||
"#------------------{0}".format("-" * 60),
|
||||
"# !!!! This is a generated file, edit at own risk !!!!",
|
||||
@@ -292,6 +306,9 @@ def std_initconfig_entries(self):
|
||||
"# CMake executable path: {0}".format(self.pkg.spec["cmake"].command.path),
|
||||
"#------------------{0}\n".format("-" * 60),
|
||||
cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path),
|
||||
cmake_cache_string("CMAKE_INSTALL_RPATH_USE_LINK_PATH", "ON"),
|
||||
cmake_cache_string("CMAKE_BUILD_RPATH", complete_rpath_list),
|
||||
cmake_cache_string("CMAKE_INSTALL_RPATH", complete_rpath_list),
|
||||
self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
|
||||
]
|
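The new entries turn `os.pathsep`-separated environment variables into the `;`-separated lists CMake expects, then feed the combined list to `CMAKE_BUILD_RPATH` and `CMAKE_INSTALL_RPATH`. A condensed sketch of that string assembly:

```python
import os

# Sketch only: rpaths would really come from spack.build_environment.get_rpaths().
rpaths = ["/opt/spack/lib", "/opt/spack/lib64"]
complete_rpath_list = ";".join(rpaths)

for var in ("SPACK_COMPILER_EXTRA_RPATHS", "SPACK_COMPILER_IMPLICIT_RPATHS"):
    if var in os.environ:
        # Convert os.pathsep-separated paths to CMake's ";"-separated form.
        complete_rpath_list += ";" + os.environ[var].replace(os.pathsep, ";")

print(complete_rpath_list)  # e.g. /opt/spack/lib;/opt/spack/lib64;...
```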
||||
|
||||
|
@@ -10,13 +10,12 @@
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
from spack.directives import build_system, extends
|
||||
from spack.package_base import PackageBase
|
||||
from spack.util.executable import Executable
|
||||
|
||||
from ._checks import BaseBuilder, execute_build_time_tests
|
||||
|
||||
|
||||
class PerlPackage(PackageBase):
|
||||
class PerlPackage(spack.package_base.PackageBase):
|
||||
"""Specialized class for packages that are built using Perl."""
|
||||
|
||||
#: This attribute is used in UI queries that need to know the build
|
||||
@@ -61,6 +60,30 @@ class PerlBuilder(BaseBuilder):
|
||||
#: Callback names for build-time test
|
||||
build_time_test_callbacks = ["check"]
|
||||
|
||||
@property
|
||||
def build_method(self):
|
||||
"""Searches the package for either a Makefile.PL or Build.PL.
|
||||
|
||||
Raises:
|
||||
RuntimeError: if neither Makefile.PL nor Build.PL exist
|
||||
"""
|
||||
if os.path.isfile("Makefile.PL"):
|
||||
build_method = "Makefile.PL"
|
||||
elif os.path.isfile("Build.PL"):
|
||||
build_method = "Build.PL"
|
||||
else:
|
||||
raise RuntimeError("Unknown build_method for perl package")
|
||||
return build_method
|
||||
|
||||
@property
|
||||
def build_executable(self):
|
||||
"""Returns the executable method to build the perl package"""
|
||||
if self.build_method == "Makefile.PL":
|
||||
build_executable = inspect.getmodule(self.pkg).make
|
||||
elif self.build_method == "Build.PL":
|
||||
build_executable = Executable(os.path.join(self.pkg.stage.source_path, "Build"))
|
||||
return build_executable
|
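Design-wise, this moves `build_method` and `build_executable` from attributes assigned inside `configure()` to read-only properties computed on demand, so the detection logic is reusable and `configure()` shrinks. A toy sketch of the property form (not the real builder):

```python
import os

class PerlBuilderSketch:
    """Toy stand-in showing the property-based dispatch."""

    @property
    def build_method(self) -> str:
        for candidate in ("Makefile.PL", "Build.PL"):
            if os.path.isfile(candidate):
                return candidate
        raise RuntimeError("Unknown build_method for perl package")
```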
||||
|
||||
def configure_args(self):
|
||||
"""List of arguments passed to :py:meth:`~.PerlBuilder.configure`.
|
||||
|
||||
@@ -73,19 +96,7 @@ def configure(self, pkg, spec, prefix):
|
||||
"""Run Makefile.PL or Build.PL with arguments consisting of
|
||||
an appropriate installation base directory followed by the
|
||||
list returned by :py:meth:`~.PerlBuilder.configure_args`.
|
||||
|
||||
Raises:
|
||||
RuntimeError: if neither Makefile.PL nor Build.PL exist
|
||||
"""
|
||||
if os.path.isfile("Makefile.PL"):
|
||||
self.build_method = "Makefile.PL"
|
||||
self.build_executable = inspect.getmodule(self.pkg).make
|
||||
elif os.path.isfile("Build.PL"):
|
||||
self.build_method = "Build.PL"
|
||||
self.build_executable = Executable(os.path.join(self.pkg.stage.source_path, "Build"))
|
||||
else:
|
||||
raise RuntimeError("Unknown build_method for perl package")
|
||||
|
||||
if self.build_method == "Makefile.PL":
|
||||
options = ["Makefile.PL", "INSTALL_BASE={0}".format(prefix)]
|
||||
elif self.build_method == "Build.PL":
|
||||
|
@@ -6,13 +6,14 @@
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
from typing import Optional
|
||||
from typing import Iterable, List, Mapping, Optional
|
||||
|
||||
import archspec
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.lang as lang
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import HeaderList, LibraryList
|
||||
|
||||
import spack.builder
|
||||
import spack.config
|
||||
@@ -25,14 +26,18 @@
|
||||
from spack.directives import build_system, depends_on, extends, maintainers
|
||||
from spack.error import NoHeadersError, NoLibrariesError
|
||||
from spack.install_test import test_part
|
||||
from spack.spec import Spec
|
||||
from spack.util.prefix import Prefix
|
||||
|
||||
from ._checks import BaseBuilder, execute_install_time_tests
|
||||
|
||||
|
||||
def _flatten_dict(dictionary):
|
||||
def _flatten_dict(dictionary: Mapping[str, object]) -> Iterable[str]:
|
||||
"""Iterable that yields KEY=VALUE paths through a dictionary.
|
||||
|
||||
Args:
|
||||
dictionary: Possibly nested dictionary of arbitrary keys and values.
|
||||
|
||||
Yields:
|
||||
A single path through the dictionary.
|
||||
"""
|
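The hunk only shows the new signature and docstring for `_flatten_dict`; the body is elided. A plausible recursive implementation matching that contract might look like the following (an illustration under assumed `.`-joined keys, not the actual Spack code):

```python
from typing import Iterable, Mapping

def flatten_dict(dictionary: Mapping[str, object], prefix: str = "") -> Iterable[str]:
    """Yield KEY=VALUE paths through a possibly nested dictionary."""
    for key, value in dictionary.items():
        path = f"{prefix}.{key}" if prefix else key
        if isinstance(value, Mapping):
            yield from flatten_dict(value, path)  # descend into nested dicts
        else:
            yield f"{path}={value}"

assert list(flatten_dict({"a": {"b": 1}, "c": 2})) == ["a.b=1", "c=2"]
```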
||||
@@ -50,7 +55,7 @@ class PythonExtension(spack.package_base.PackageBase):
|
||||
maintainers("adamjstewart")
|
||||
|
||||
@property
|
||||
def import_modules(self):
|
||||
def import_modules(self) -> Iterable[str]:
|
||||
"""Names of modules that the Python package provides.
|
||||
|
||||
These are used to test whether or not the installation succeeded.
|
||||
@@ -65,7 +70,7 @@ def import_modules(self):
|
||||
detected, this property can be overridden by the package.
|
||||
|
||||
Returns:
|
||||
list: list of strings of module names
|
||||
List of strings of module names.
|
||||
"""
|
||||
modules = []
|
||||
pkg = self.spec["python"].package
|
||||
@@ -102,14 +107,14 @@ def import_modules(self):
|
||||
return modules
|
||||
|
||||
@property
|
||||
def skip_modules(self):
|
||||
def skip_modules(self) -> Iterable[str]:
|
||||
"""Names of modules that should be skipped when running tests.
|
||||
|
||||
These are a subset of import_modules. If a module has submodules,
|
||||
they are skipped as well (meaning a.b is skipped if a is contained).
|
||||
|
||||
Returns:
|
||||
list: list of strings of module names
|
||||
List of strings of module names.
|
||||
"""
|
||||
return []
|
||||
|
||||
@@ -185,12 +190,12 @@ def remove_files_from_view(self, view, merge_map):
|
||||
|
||||
view.remove_files(to_remove)
|
||||
|
||||
def test_imports(self):
|
||||
def test_imports(self) -> None:
|
||||
"""Attempts to import modules of the installed package."""
|
||||
|
||||
# Make sure we are importing the installed modules,
|
||||
# not the ones in the source directory
|
||||
python = inspect.getmodule(self).python
|
||||
python = inspect.getmodule(self).python # type: ignore[union-attr]
|
||||
for module in self.import_modules:
|
||||
with test_part(
|
||||
self,
|
||||
@@ -315,24 +320,27 @@ class PythonPackage(PythonExtension):
|
||||
py_namespace: Optional[str] = None
|
||||
|
||||
@lang.classproperty
|
||||
def homepage(cls):
|
||||
def homepage(cls) -> Optional[str]: # type: ignore[override]
|
||||
if cls.pypi:
|
||||
name = cls.pypi.split("/")[0]
|
||||
return "https://pypi.org/project/" + name + "/"
|
||||
return f"https://pypi.org/project/{name}/"
|
||||
return None
|
||||
|
||||
@lang.classproperty
|
||||
def url(cls):
|
||||
def url(cls) -> Optional[str]:
|
||||
if cls.pypi:
|
||||
return "https://files.pythonhosted.org/packages/source/" + cls.pypi[0] + "/" + cls.pypi
|
||||
return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
|
||||
return None
|
||||
|
||||
@lang.classproperty
|
||||
def list_url(cls):
|
||||
def list_url(cls) -> Optional[str]: # type: ignore[override]
|
||||
if cls.pypi:
|
||||
name = cls.pypi.split("/")[0]
|
||||
return "https://pypi.org/simple/" + name + "/"
|
||||
return f"https://pypi.org/simple/{name}/"
|
||||
return None
|
||||
|
||||
@property
|
||||
def headers(self):
|
||||
def headers(self) -> HeaderList:
|
||||
"""Discover header files in platlib."""
|
||||
|
||||
# Remove py- prefix in package name
|
||||
@@ -350,7 +358,7 @@ def headers(self):
|
||||
raise NoHeadersError(msg.format(self.spec.name, include, platlib))
|
||||
|
||||
@property
|
||||
def libs(self):
|
||||
def libs(self) -> LibraryList:
|
||||
"""Discover libraries in platlib."""
|
||||
|
||||
# Remove py- prefix in package name
|
||||
@@ -384,7 +392,7 @@ class PythonPipBuilder(BaseBuilder):
|
||||
install_time_test_callbacks = ["test"]
|
||||
|
||||
@staticmethod
|
||||
def std_args(cls):
|
||||
def std_args(cls) -> List[str]:
|
||||
return [
|
||||
# Verbose
|
||||
"-vvv",
|
||||
@@ -409,7 +417,7 @@ def std_args(cls):
|
||||
]
|
||||
|
||||
@property
|
||||
def build_directory(self):
|
||||
def build_directory(self) -> str:
|
||||
"""The root directory of the Python package.
|
||||
|
||||
This is usually the directory containing one of the following files:
|
||||
@@ -420,51 +428,51 @@ def build_directory(self):
|
||||
"""
|
||||
return self.pkg.stage.source_path
|
||||
|
||||
def config_settings(self, spec, prefix):
|
||||
def config_settings(self, spec: Spec, prefix: Prefix) -> Mapping[str, object]:
|
||||
"""Configuration settings to be passed to the PEP 517 build backend.
|
||||
|
||||
Requires pip 22.1 or newer for keys that appear only a single time,
|
||||
or pip 23.1 or newer if the same key appears multiple times.
|
||||
|
||||
Args:
|
||||
spec (spack.spec.Spec): build spec
|
||||
prefix (spack.util.prefix.Prefix): installation prefix
|
||||
spec: Build spec.
|
||||
prefix: Installation prefix.
|
||||
|
||||
Returns:
|
||||
dict: Possibly nested dictionary of KEY, VALUE settings
|
||||
Possibly nested dictionary of KEY, VALUE settings.
|
||||
"""
|
||||
return {}
|
||||
|
||||
def install_options(self, spec, prefix):
|
||||
def install_options(self, spec: Spec, prefix: Prefix) -> Iterable[str]:
|
||||
"""Extra arguments to be supplied to the setup.py install command.
|
||||
|
||||
Requires pip 23.0 or older.
|
||||
|
||||
Args:
|
||||
spec (spack.spec.Spec): build spec
|
||||
prefix (spack.util.prefix.Prefix): installation prefix
|
||||
spec: Build spec.
|
||||
prefix: Installation prefix.
|
||||
|
||||
Returns:
|
||||
list: list of options
|
||||
List of options.
|
||||
"""
|
||||
return []
|
||||
|
||||
def global_options(self, spec, prefix):
|
||||
def global_options(self, spec: Spec, prefix: Prefix) -> Iterable[str]:
|
||||
"""Extra global options to be supplied to the setup.py call before the install
|
||||
or bdist_wheel command.
|
||||
|
||||
Deprecated in pip 23.1.
|
||||
|
||||
Args:
|
||||
spec (spack.spec.Spec): build spec
|
||||
prefix (spack.util.prefix.Prefix): installation prefix
|
||||
spec: Build spec.
|
||||
prefix: Installation prefix.
|
||||
|
||||
Returns:
|
||||
list: list of options
|
||||
List of options.
|
||||
"""
|
||||
return []
|
||||
|
||||
def install(self, pkg, spec, prefix):
|
||||
def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Install everything from build directory."""
|
||||
|
||||
args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"]
|
||||
|
@@ -108,6 +108,8 @@ class ROCmPackage(PackageBase):
|
||||
"gfx90a:xnack+",
|
||||
"gfx90c",
|
||||
"gfx940",
|
||||
"gfx941",
|
||||
"gfx942",
|
||||
"gfx1010",
|
||||
"gfx1011",
|
||||
"gfx1012",
|
||||
@@ -168,6 +170,8 @@ def hip_flags(amdgpu_target):
|
||||
depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a:xnack-")
|
||||
depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a:xnack+")
|
||||
depends_on("llvm-amdgpu@5.2.0:", when="amdgpu_target=gfx940")
|
||||
depends_on("llvm-amdgpu@5.7.0:", when="amdgpu_target=gfx941")
|
||||
depends_on("llvm-amdgpu@5.7.0:", when="amdgpu_target=gfx942")
|
||||
depends_on("llvm-amdgpu@4.5.0:", when="amdgpu_target=gfx1013")
|
||||
depends_on("llvm-amdgpu@3.8.0:", when="amdgpu_target=gfx1030")
|
||||
depends_on("llvm-amdgpu@3.9.0:", when="amdgpu_target=gfx1031")
|
||||
|
@@ -1253,6 +1253,7 @@ def main_script_replacements(cmd):
|
||||
op=lambda cmd: cmd.replace("mirror_prefix", temp_storage_url_prefix),
|
||||
)
|
||||
|
||||
cleanup_job["dependencies"] = []
|
||||
output_object["cleanup"] = cleanup_job
|
||||
|
||||
if (
|
||||
@@ -1276,6 +1277,7 @@ def main_script_replacements(cmd):
|
||||
if buildcache_destination
|
||||
else remote_mirror_override or remote_mirror_url
|
||||
)
|
||||
signing_job["dependencies"] = []
|
||||
|
||||
output_object["sign-pkgs"] = signing_job
|
||||
|
||||
@@ -1296,6 +1298,7 @@ def main_script_replacements(cmd):
|
||||
final_job["when"] = "always"
|
||||
final_job["retry"] = service_job_retries
|
||||
final_job["interruptible"] = True
|
||||
final_job["dependencies"] = []
|
||||
|
||||
output_object["rebuild-index"] = final_job
|
||||
|
||||
|
@@ -6,10 +6,8 @@
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
import shlex
|
||||
import sys
|
||||
from textwrap import dedent
|
||||
from typing import List, Match, Tuple
|
||||
from typing import List, Union
|
||||
|
||||
import llnl.string
|
||||
import llnl.util.tty as tty
|
||||
@@ -147,89 +145,37 @@ def get_command(cmd_name):
|
||||
return getattr(get_module(cmd_name), pname)
|
||||
|
||||
|
||||
class _UnquotedFlags:
|
||||
"""Use a heuristic in `.extract()` to detect whether the user is trying to set
|
||||
multiple flags like the docker ENV attribute allows (e.g. 'cflags=-Os -pipe').
|
||||
def quote_kvp(string: str) -> str:
|
||||
"""For strings like ``name=value`` or ``name==value``, quote and escape the value if needed.
|
||||
|
||||
If the heuristic finds a match (which can be checked with `__bool__()`), a warning
|
||||
message explaining how to quote multiple flags correctly can be generated with
|
||||
`.report()`.
|
||||
This is a compromise to respect quoting of key-value pairs on the CLI. The shell
|
||||
strips quotes from quoted arguments, so we cannot know *exactly* how CLI arguments
|
||||
were quoted. To compensate, we re-add quotes around anything starting with ``name=``
|
||||
or ``name==``, and we assume the rest of the argument is the value. This covers the
|
||||
common cases of passing flags, e.g., ``cflags="-O2 -g"`` on the command line.
|
||||
"""
|
||||
match = spack.parser.SPLIT_KVP.match(string)
|
||||
if not match:
|
||||
return string
|
||||
|
||||
flags_arg_pattern = re.compile(
|
||||
r'^({0})=([^\'"].*)$'.format("|".join(spack.spec.FlagMap.valid_compiler_flags()))
|
||||
)
|
||||
|
||||
def __init__(self, all_unquoted_flag_pairs: List[Tuple[Match[str], str]]):
|
||||
self._flag_pairs = all_unquoted_flag_pairs
|
||||
|
||||
def __bool__(self) -> bool:
|
||||
return bool(self._flag_pairs)
|
||||
|
||||
@classmethod
|
||||
def extract(cls, sargs: str) -> "_UnquotedFlags":
|
||||
all_unquoted_flag_pairs: List[Tuple[Match[str], str]] = []
|
||||
prev_flags_arg = None
|
||||
for arg in shlex.split(sargs):
|
||||
if prev_flags_arg is not None:
|
||||
all_unquoted_flag_pairs.append((prev_flags_arg, arg))
|
||||
prev_flags_arg = cls.flags_arg_pattern.match(arg)
|
||||
return cls(all_unquoted_flag_pairs)
|
||||
|
||||
def report(self) -> str:
|
||||
single_errors = [
|
||||
"({0}) {1} {2} => {3}".format(
|
||||
i + 1,
|
||||
match.group(0),
|
||||
next_arg,
|
||||
'{0}="{1} {2}"'.format(match.group(1), match.group(2), next_arg),
|
||||
)
|
||||
for i, (match, next_arg) in enumerate(self._flag_pairs)
|
||||
]
|
||||
return dedent(
|
||||
"""\
|
||||
Some compiler or linker flags were provided without quoting their arguments,
|
||||
which now causes spack to try to parse the *next* argument as a spec component
|
||||
such as a variant instead of an additional compiler or linker flag. If the
|
||||
intent was to set multiple flags, try quoting them together as described below.
|
||||
|
||||
Possible flag quotation errors (with the correctly-quoted version after the =>):
|
||||
{0}"""
|
||||
).format("\n".join(single_errors))
|
||||
key, delim, value = match.groups()
|
||||
return f"{key}{delim}{spack.parser.quote_if_needed(value)}"
|
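`quote_kvp` undoes the shell's quote-stripping for key-value arguments: anything shaped like `name=value` or `name==value` gets its value re-quoted before the spec string is reassembled. A self-contained sketch, with simplified stand-ins for `spack.parser.SPLIT_KVP` and `quote_if_needed`:

```python
import re
import shlex

SPLIT_KVP = re.compile(r"^(\w+)(==?)(.*)$")  # simplified stand-in

def quote_if_needed(value: str) -> str:
    # Simplified stand-in for spack.parser.quote_if_needed.
    return shlex.quote(value) if any(c in value for c in " \t'\"") else value

def quote_kvp(string: str) -> str:
    match = SPLIT_KVP.match(string)
    if not match:
        return string
    key, delim, value = match.groups()
    return f"{key}{delim}{quote_if_needed(value)}"

assert quote_kvp("cflags=-O2 -g") == "cflags='-O2 -g'"
assert quote_kvp("zlib@1.3") == "zlib@1.3"  # not a key-value pair: unchanged
```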
||||
|
||||
|
||||
def parse_specs(args, **kwargs):
|
||||
def parse_specs(
|
||||
args: Union[str, List[str]], concretize: bool = False, tests: bool = False
|
||||
) -> List[spack.spec.Spec]:
|
||||
"""Convenience function for parsing arguments from specs. Handles common
|
||||
exceptions and dies if there are errors.
|
||||
"""
|
||||
concretize = kwargs.get("concretize", False)
|
||||
normalize = kwargs.get("normalize", False)
|
||||
tests = kwargs.get("tests", False)
|
||||
args = [args] if isinstance(args, str) else args
|
||||
arg_string = " ".join([quote_kvp(arg) for arg in args])
|
||||
|
||||
sargs = args
|
||||
if not isinstance(args, str):
|
||||
sargs = " ".join(args)
|
||||
unquoted_flags = _UnquotedFlags.extract(sargs)
|
||||
|
||||
try:
|
||||
specs = spack.parser.parse(sargs)
|
||||
for spec in specs:
|
||||
if concretize:
|
||||
spec.concretize(tests=tests) # implies normalize
|
||||
elif normalize:
|
||||
spec.normalize(tests=tests)
|
||||
return specs
|
||||
|
||||
except spack.error.SpecError as e:
|
||||
msg = e.message
|
||||
if e.long_message:
|
||||
msg += e.long_message
|
||||
# Unquoted flags will be read as a variant or hash
|
||||
if unquoted_flags and ("variant" in msg or "hash" in msg):
|
||||
msg += "\n\n"
|
||||
msg += unquoted_flags.report()
|
||||
|
||||
raise spack.error.SpackError(msg) from e
|
||||
specs = spack.parser.parse(arg_string)
|
||||
for spec in specs:
|
||||
if concretize:
|
||||
spec.concretize(tests=tests)
|
||||
return specs
|
||||
|
||||
|
||||
def matching_spec_from_env(spec):
|
||||
|
@@ -37,6 +37,7 @@
|
||||
import spack.util.crypto
|
||||
import spack.util.url as url_util
|
||||
import spack.util.web as web_util
|
||||
from spack import traverse
|
||||
from spack.build_environment import determine_number_of_jobs
|
||||
from spack.cmd import display_specs
|
||||
from spack.cmd.common import arguments
|
||||
@@ -76,7 +77,19 @@ def setup_parser(subparser: argparse.ArgumentParser):
|
||||
)
|
||||
push_sign = push.add_mutually_exclusive_group(required=False)
|
||||
push_sign.add_argument(
|
||||
"--unsigned", "-u", action="store_true", help="push unsigned buildcache tarballs"
|
||||
"--unsigned",
|
||||
"-u",
|
||||
action="store_false",
|
||||
dest="signed",
|
||||
default=None,
|
||||
help="push unsigned buildcache tarballs",
|
||||
)
|
||||
push_sign.add_argument(
|
||||
"--signed",
|
||||
action="store_true",
|
||||
dest="signed",
|
||||
default=None,
|
||||
help="push signed buildcache tarballs",
|
||||
)
|
||||
push_sign.add_argument(
|
||||
"--key", "-k", metavar="key", type=str, default=None, help="key for signing"
|
||||
@@ -110,7 +123,14 @@ def setup_parser(subparser: argparse.ArgumentParser):
|
||||
help="stop pushing on first failure (default is best effort)",
|
||||
)
|
||||
push.add_argument(
|
||||
"--base-image", default=None, help="specify the base image for the buildcache. "
|
||||
"--base-image", default=None, help="specify the base image for the buildcache"
|
||||
)
|
||||
push.add_argument(
|
||||
"--tag",
|
||||
"-t",
|
||||
default=None,
|
||||
help="when pushing to an OCI registry, tag an image containing all root specs and their "
|
||||
"runtime dependencies",
|
||||
)
|
||||
arguments.add_common_arguments(push, ["specs", "jobs"])
|
||||
push.set_defaults(func=push_fn)
|
||||
@@ -188,14 +208,16 @@ def setup_parser(subparser: argparse.ArgumentParser):
|
||||
default=lambda: spack.config.default_modify_scope(),
|
||||
help="configuration scope containing mirrors to check",
|
||||
)
|
||||
check_spec_or_specfile = check.add_mutually_exclusive_group(required=True)
|
||||
check_spec_or_specfile.add_argument(
|
||||
# Unfortunately there are 3 ways to do the same thing here:
|
||||
check_specs = check.add_mutually_exclusive_group()
|
||||
check_specs.add_argument(
|
||||
"-s", "--spec", help="check single spec instead of release specs file"
|
||||
)
|
||||
check_spec_or_specfile.add_argument(
|
||||
check_specs.add_argument(
|
||||
"--spec-file",
|
||||
help="check single spec from json or yaml file instead of release specs file",
|
||||
)
|
||||
arguments.add_common_arguments(check, ["specs"])
|
||||
|
||||
check.set_defaults(func=check_fn)
|
||||
|
||||
@@ -317,26 +339,36 @@ def push_fn(args):
|
||||
)
|
||||
|
||||
if args.specs or args.spec_file:
|
||||
specs = _matching_specs(spack.cmd.parse_specs(args.specs or args.spec_file))
|
||||
roots = _matching_specs(spack.cmd.parse_specs(args.specs or args.spec_file))
|
||||
else:
|
||||
specs = spack.cmd.require_active_env("buildcache push").all_specs()
|
||||
roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
|
||||
|
||||
if args.allow_root:
|
||||
tty.warn(
|
||||
"The flag `--allow-root` is the default in Spack 0.21, will be removed in Spack 0.22"
|
||||
)
|
||||
|
||||
mirror: spack.mirror.Mirror = args.mirror
|
||||
|
||||
# Check if this is an OCI image.
|
||||
try:
|
||||
image_ref = spack.oci.oci.image_from_mirror(args.mirror)
|
||||
target_image = spack.oci.oci.image_from_mirror(mirror)
|
||||
except ValueError:
|
||||
image_ref = None
|
||||
target_image = None
|
||||
|
||||
push_url = mirror.push_url
|
||||
|
||||
# When neither --signed, --unsigned nor --key are specified, use the mirror's default.
|
||||
if args.signed is None and not args.key:
|
||||
unsigned = not mirror.signed
|
||||
else:
|
||||
unsigned = not (args.key or args.signed)
|
||||
|
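Both `--signed` and `--unsigned` write to the same `dest`, so `args.signed` is a tri-state: `True`, `False`, or `None` when neither flag was given. Combined with `--key`, the push-side default then resolves as sketched below (plain argparse, no Spack imports):

```python
import argparse

parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group()
group.add_argument("--unsigned", "-u", action="store_false", dest="signed", default=None)
group.add_argument("--signed", action="store_true", dest="signed", default=None)
parser.add_argument("--key", "-k", default=None)

def resolve_unsigned(args, mirror_signed: bool) -> bool:
    if args.signed is None and not args.key:
        return not mirror_signed           # no preference: mirror default wins
    return not (args.key or args.signed)   # a signing key implies a signed push

args = parser.parse_args([])
assert args.signed is None and resolve_unsigned(args, mirror_signed=True) is False
args = parser.parse_args(["--unsigned"])
assert resolve_unsigned(args, mirror_signed=True) is True
args = parser.parse_args(["--key", "ABC123"])  # hypothetical key name
assert resolve_unsigned(args, mirror_signed=False) is False
```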
||||
# For OCI images, we require dependencies to be pushed for now.
|
||||
if image_ref:
|
||||
if target_image:
|
||||
if "dependencies" not in args.things_to_install:
|
||||
tty.die("Dependencies must be pushed for OCI images.")
|
||||
if not args.unsigned:
|
||||
if not unsigned:
|
||||
tty.warn(
|
||||
"Code signing is currently not supported for OCI images. "
|
||||
"Use --unsigned to silence this warning."
|
||||
@@ -344,26 +376,48 @@ def push_fn(args):
|
||||
|
||||
# This is a list of installed, non-external specs.
|
||||
specs = bindist.specs_to_be_packaged(
|
||||
specs,
|
||||
roots,
|
||||
root="package" in args.things_to_install,
|
||||
dependencies="dependencies" in args.things_to_install,
|
||||
)
|
||||
|
||||
url = args.mirror.push_url
|
||||
|
||||
# When pushing multiple specs, print the url once ahead of time, as well as how
|
||||
# many specs are being pushed.
|
||||
if len(specs) > 1:
|
||||
tty.info(f"Selected {len(specs)} specs to push to {url}")
|
||||
tty.info(f"Selected {len(specs)} specs to push to {push_url}")
|
||||
|
||||
failed = []
|
||||
|
||||
# TODO: unify this logic in the future.
|
||||
if image_ref:
|
||||
if target_image:
|
||||
base_image = ImageReference.from_string(args.base_image) if args.base_image else None
|
||||
with tempfile.TemporaryDirectory(
|
||||
dir=spack.stage.get_stage_root()
|
||||
) as tmpdir, _make_pool() as pool:
|
||||
skipped = _push_oci(args, image_ref, specs, tmpdir, pool)
|
||||
skipped, base_images, checksums = _push_oci(
|
||||
target_image=target_image,
|
||||
base_image=base_image,
|
||||
installed_specs_with_deps=specs,
|
||||
force=args.force,
|
||||
tmpdir=tmpdir,
|
||||
pool=pool,
|
||||
)
|
||||
|
||||
# Apart from creating manifests for each individual spec, we allow users to create a
|
||||
# separate image tag for all root specs and their runtime dependencies.
|
||||
if args.tag:
|
||||
tagged_image = target_image.with_tag(args.tag)
|
||||
# _push_oci may not populate base_images if binaries were already in the registry
|
||||
for spec in roots:
|
||||
_update_base_images(
|
||||
base_image=base_image,
|
||||
target_image=target_image,
|
||||
spec=spec,
|
||||
base_image_cache=base_images,
|
||||
)
|
||||
_put_manifest(base_images, checksums, tagged_image, tmpdir, None, None, *roots)
|
||||
tty.info(f"Tagged {tagged_image}")
|
||||
|
||||
else:
|
||||
skipped = []
|
||||
|
||||
@@ -371,10 +425,10 @@ def push_fn(args):
|
||||
try:
|
||||
bindist.push_or_raise(
|
||||
spec,
|
||||
url,
|
||||
push_url,
|
||||
bindist.PushOptions(
|
||||
force=args.force,
|
||||
unsigned=args.unsigned,
|
||||
unsigned=unsigned,
|
||||
key=args.key,
|
||||
regenerate_index=args.update_index,
|
||||
),
|
||||
@@ -382,7 +436,7 @@ def push_fn(args):
|
||||
|
||||
msg = f"{_progress(i, len(specs))}Pushed {_format_spec(spec)}"
|
||||
if len(specs) == 1:
|
||||
msg += f" to {url}"
|
||||
msg += f" to {push_url}"
|
||||
tty.info(msg)
|
||||
|
||||
except bindist.NoOverwriteException:
|
||||
@@ -424,11 +478,11 @@ def push_fn(args):
|
||||
# Update the index if requested
|
||||
# TODO: remove update index logic out of bindist; should be once after all specs are pushed
|
||||
# not once per spec.
|
||||
if image_ref and len(skipped) < len(specs) and args.update_index:
|
||||
if target_image and len(skipped) < len(specs) and args.update_index:
|
||||
with tempfile.TemporaryDirectory(
|
||||
dir=spack.stage.get_stage_root()
|
||||
) as tmpdir, _make_pool() as pool:
|
||||
_update_index_oci(image_ref, tmpdir, pool)
|
||||
_update_index_oci(target_image, tmpdir, pool)
|
||||
|
||||
|
||||
def _get_spack_binary_blob(image_ref: ImageReference) -> Optional[spack.oci.oci.Blob]:
|
||||
@@ -494,17 +548,21 @@ def _archspec_to_gooarch(spec: spack.spec.Spec) -> str:
|
||||
def _put_manifest(
|
||||
base_images: Dict[str, Tuple[dict, dict]],
|
||||
checksums: Dict[str, spack.oci.oci.Blob],
|
||||
spec: spack.spec.Spec,
|
||||
image_ref: ImageReference,
|
||||
tmpdir: str,
|
||||
extra_config: Optional[dict],
|
||||
annotations: Optional[dict],
|
||||
*specs: spack.spec.Spec,
|
||||
):
|
||||
architecture = _archspec_to_gooarch(spec)
|
||||
architecture = _archspec_to_gooarch(specs[0])
|
||||
|
||||
dependencies = list(
|
||||
reversed(
|
||||
list(
|
||||
s
|
||||
for s in spec.traverse(order="topo", deptype=("link", "run"), root=True)
|
||||
for s in traverse.traverse_nodes(
|
||||
specs, order="topo", deptype=("link", "run"), root=True
|
||||
)
|
||||
if not s.external
|
||||
)
|
||||
)
|
||||
@@ -513,7 +571,7 @@ def _put_manifest(
|
||||
base_manifest, base_config = base_images[architecture]
|
||||
env = _retrieve_env_dict_from_config(base_config)
|
||||
|
||||
spack.user_environment.environment_modifications_for_specs(spec).apply_modifications(env)
|
||||
spack.user_environment.environment_modifications_for_specs(*specs).apply_modifications(env)
|
||||
|
||||
# Create an oci.image.config file
|
||||
config = copy.deepcopy(base_config)
|
||||
@@ -525,20 +583,14 @@ def _put_manifest(
|
||||
# Set the environment variables
|
||||
config["config"]["Env"] = [f"{k}={v}" for k, v in env.items()]
|
||||
|
||||
# From the OCI v1.0 spec:
|
||||
# > Any extra fields in the Image JSON struct are considered implementation
|
||||
# > specific and MUST be ignored by any implementations which are unable to
|
||||
# > interpret them.
|
||||
# We use this to store the Spack spec, so we can use it to create an index.
|
||||
spec_dict = spec.to_dict(hash=ht.dag_hash)
|
||||
spec_dict["buildcache_layout_version"] = 1
|
||||
spec_dict["binary_cache_checksum"] = {
|
||||
"hash_algorithm": "sha256",
|
||||
"hash": checksums[spec.dag_hash()].compressed_digest.digest,
|
||||
}
|
||||
config.update(spec_dict)
|
||||
if extra_config:
|
||||
# From the OCI v1.0 spec:
|
||||
# > Any extra fields in the Image JSON struct are considered implementation
|
||||
# > specific and MUST be ignored by any implementations which are unable to
|
||||
# > interpret them.
|
||||
config.update(extra_config)
|
||||
|
||||
config_file = os.path.join(tmpdir, f"{spec.dag_hash()}.config.json")
|
||||
config_file = os.path.join(tmpdir, f"{specs[0].dag_hash()}.config.json")
|
||||
|
||||
with open(config_file, "w") as f:
|
||||
json.dump(config, f, separators=(",", ":"))
|
||||
@@ -569,48 +621,69 @@ def _put_manifest(
|
||||
for s in dependencies
|
||||
),
|
||||
],
|
||||
"annotations": {"org.opencontainers.image.description": spec.format()},
|
||||
}
|
||||
|
||||
image_ref_for_spec = image_ref.with_tag(default_tag(spec))
|
||||
if annotations:
|
||||
oci_manifest["annotations"] = annotations
|
||||
|
||||
# Finally upload the manifest
|
||||
upload_manifest_with_retry(image_ref_for_spec, oci_manifest=oci_manifest)
|
||||
upload_manifest_with_retry(image_ref, oci_manifest=oci_manifest)
|
||||
|
||||
# delete the config file
|
||||
os.unlink(config_file)
|
||||
|
||||
return image_ref_for_spec
|
||||
|
||||
def _update_base_images(
|
||||
*,
|
||||
base_image: Optional[ImageReference],
|
||||
target_image: ImageReference,
|
||||
spec: spack.spec.Spec,
|
||||
base_image_cache: Dict[str, Tuple[dict, dict]],
|
||||
):
|
||||
"""For a given spec and base image, copy the missing layers of the base image with matching
|
||||
arch to the registry of the target image. If no base image is specified, create a dummy
|
||||
manifest and config file."""
|
||||
architecture = _archspec_to_gooarch(spec)
|
||||
if architecture in base_image_cache:
|
||||
return
|
||||
if base_image is None:
|
||||
base_image_cache[architecture] = (
|
||||
default_manifest(),
|
||||
default_config(architecture, "linux"),
|
||||
)
|
||||
else:
|
||||
base_image_cache[architecture] = copy_missing_layers_with_retry(
|
||||
base_image, target_image, architecture
|
||||
)
|
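`_update_base_images` memoizes per architecture: the first spec of a given arch either synthesizes a dummy manifest/config pair or copies the real base image's layers; later specs of the same arch hit the cache. A toy sketch of that pattern with placeholder helpers:

```python
from typing import Dict, Optional, Tuple

def default_manifest() -> dict:                        # placeholder OCI helper
    return {"schemaVersion": 2}

def default_config(arch: str, os_name: str) -> dict:   # placeholder OCI helper
    return {"architecture": arch, "os": os_name}

def update_base_images(base_image: Optional[str], arch: str,
                       cache: Dict[str, Tuple[dict, dict]]) -> None:
    if arch in cache:
        return  # already resolved for this architecture
    if base_image is None:
        cache[arch] = (default_manifest(), default_config(arch, "linux"))
    else:
        # The real code calls copy_missing_layers_with_retry(base, target, arch).
        cache[arch] = ({"copied": base_image}, {"architecture": arch})

cache: Dict[str, Tuple[dict, dict]] = {}
update_base_images(None, "amd64", cache)
update_base_images(None, "amd64", cache)  # second call is a no-op
assert list(cache) == ["amd64"]
```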
||||
|
||||
|
||||
def _push_oci(
|
||||
args,
|
||||
image_ref: ImageReference,
|
||||
*,
|
||||
target_image: ImageReference,
|
||||
base_image: Optional[ImageReference],
|
||||
installed_specs_with_deps: List[Spec],
|
||||
tmpdir: str,
|
||||
pool: multiprocessing.pool.Pool,
|
||||
) -> List[str]:
|
||||
force: bool = False,
|
||||
) -> Tuple[List[str], Dict[str, Tuple[dict, dict]], Dict[str, spack.oci.oci.Blob]]:
|
||||
"""Push specs to an OCI registry
|
||||
|
||||
Args:
|
||||
args: The command line arguments.
|
||||
image_ref: The image reference.
|
||||
target_image: The target OCI image.
|
||||
base_image: Optional base image, which will be copied to the target registry.
|
||||
installed_specs_with_deps: The installed specs to push, excluding externals,
|
||||
including deps, ordered from roots to leaves.
|
||||
force: Whether to overwrite existing layers and manifests in the buildcache.
|
||||
|
||||
Returns:
|
||||
List[str]: The list of skipped specs (already in the buildcache).
|
||||
A tuple consisting of the list of skipped specs already in the build cache,
|
||||
a dictionary mapping architectures to base image manifests and configs,
|
||||
and a dictionary mapping each spec's dag hash to a blob.
|
||||
"""
|
||||
|
||||
# Reverse the order
|
||||
installed_specs_with_deps = list(reversed(installed_specs_with_deps))
|
||||
|
||||
# The base image to use for the package. When not set, we use
|
||||
# the OCI registry only for storage, and do not use any base image.
|
||||
base_image_ref: Optional[ImageReference] = (
|
||||
ImageReference.from_string(args.base_image) if args.base_image else None
|
||||
)
|
||||
|
||||
# Spec dag hash -> blob
|
||||
checksums: Dict[str, spack.oci.oci.Blob] = {}
|
||||
|
||||
@@ -620,11 +693,11 @@ def _push_oci(
|
||||
# Specs not uploaded because they already exist
|
||||
skipped = []
|
||||
|
||||
if not args.force:
|
||||
if not force:
|
||||
tty.info("Checking for existing specs in the buildcache")
|
||||
to_be_uploaded = []
|
||||
|
||||
tags_to_check = (image_ref.with_tag(default_tag(s)) for s in installed_specs_with_deps)
|
||||
tags_to_check = (target_image.with_tag(default_tag(s)) for s in installed_specs_with_deps)
|
||||
available_blobs = pool.map(_get_spack_binary_blob, tags_to_check)
|
||||
|
||||
for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs):
|
||||
@@ -637,46 +710,63 @@ def _push_oci(
|
||||
to_be_uploaded = installed_specs_with_deps
|
||||
|
||||
if not to_be_uploaded:
|
||||
return skipped
|
||||
return skipped, base_images, checksums
|
||||
|
||||
tty.info(
|
||||
f"{len(to_be_uploaded)} specs need to be pushed to {image_ref.domain}/{image_ref.name}"
|
||||
f"{len(to_be_uploaded)} specs need to be pushed to "
|
||||
f"{target_image.domain}/{target_image.name}"
|
||||
)
|
||||
|
||||
# Upload blobs
|
||||
new_blobs = pool.starmap(
|
||||
_push_single_spack_binary_blob, ((image_ref, spec, tmpdir) for spec in to_be_uploaded)
|
||||
_push_single_spack_binary_blob, ((target_image, spec, tmpdir) for spec in to_be_uploaded)
|
||||
)
|
||||
|
||||
# And update the spec to blob mapping
|
||||
for spec, blob in zip(to_be_uploaded, new_blobs):
|
||||
checksums[spec.dag_hash()] = blob
|
||||
|
||||
# Copy base image layers, probably fine to do sequentially.
|
||||
# Copy base images if necessary
|
||||
for spec in to_be_uploaded:
|
||||
architecture = _archspec_to_gooarch(spec)
|
||||
# Get base image details, if we don't have them yet
|
||||
if architecture in base_images:
|
||||
continue
|
||||
if base_image_ref is None:
|
||||
base_images[architecture] = (default_manifest(), default_config(architecture, "linux"))
|
||||
else:
|
||||
base_images[architecture] = copy_missing_layers_with_retry(
|
||||
base_image_ref, image_ref, architecture
|
||||
)
|
||||
_update_base_images(
|
||||
base_image=base_image,
|
||||
target_image=target_image,
|
||||
spec=spec,
|
||||
base_image_cache=base_images,
|
||||
)
|
||||
|
||||
def extra_config(spec: Spec):
|
||||
spec_dict = spec.to_dict(hash=ht.dag_hash)
|
||||
spec_dict["buildcache_layout_version"] = 1
|
||||
spec_dict["binary_cache_checksum"] = {
|
||||
"hash_algorithm": "sha256",
|
||||
"hash": checksums[spec.dag_hash()].compressed_digest.digest,
|
||||
}
|
||||
return spec_dict
|
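The `extra_config` closure serializes the spec into the OCI image config, relying on the OCI v1.0 rule that unknown fields must be ignored; the buildcache index can later be rebuilt from these configs alone. A small sketch of the merge, with a made-up checksum (the real payload comes from `spec.to_dict(hash=ht.dag_hash)`):

```python
import json

config = {"architecture": "amd64", "os": "linux", "config": {"Env": []}}

# Hypothetical spec payload with a fabricated sha256 digest.
extra_config = {
    "buildcache_layout_version": 1,
    "binary_cache_checksum": {"hash_algorithm": "sha256", "hash": "deadbeef" * 8},
}
config.update(extra_config)  # extra fields are legal per the OCI v1.0 spec

print(json.dumps(config, separators=(",", ":"))[:60])
```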
||||
|
||||
# Upload manifests
|
||||
tty.info("Uploading manifests")
|
||||
pushed_image_ref = pool.starmap(
|
||||
pool.starmap(
|
||||
_put_manifest,
|
||||
((base_images, checksums, spec, image_ref, tmpdir) for spec in to_be_uploaded),
|
||||
(
|
||||
(
|
||||
base_images,
|
||||
checksums,
|
||||
target_image.with_tag(default_tag(spec)),
|
||||
tmpdir,
|
||||
extra_config(spec),
|
||||
{"org.opencontainers.image.description": spec.format()},
|
||||
spec,
|
||||
)
|
||||
for spec in to_be_uploaded
|
||||
),
|
||||
)
|
||||
|
||||
# Print the image names of the top-level specs
|
||||
for spec, ref in zip(to_be_uploaded, pushed_image_ref):
|
||||
tty.info(f"Pushed {_format_spec(spec)} to {ref}")
|
||||
for spec in to_be_uploaded:
|
||||
tty.info(f"Pushed {_format_spec(spec)} to {target_image.with_tag(default_tag(spec))}")
|
||||
|
||||
return skipped
|
||||
return skipped, base_images, checksums
|
||||
|
||||
|
||||
def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:
|
||||
@@ -813,15 +903,24 @@ def check_fn(args: argparse.Namespace):
|
||||
exit code is non-zero, then at least one of the indicated specs needs to be rebuilt
|
||||
"""
|
||||
if args.spec_file:
|
||||
specs_arg = (
|
||||
args.spec_file if os.path.sep in args.spec_file else os.path.join(".", args.spec_file)
|
||||
)
|
||||
tty.warn(
|
||||
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
|
||||
"Use --spec instead."
|
||||
f"Use `spack buildcache check {specs_arg}` instead."
|
||||
)
|
||||
elif args.spec:
|
||||
specs_arg = args.spec
|
||||
tty.warn(
|
||||
"The flag `--spec` is deprecated and will be removed in Spack 0.23. "
|
||||
f"Use `spack buildcache check {specs_arg}` instead."
|
||||
)
|
||||
else:
|
||||
specs_arg = args.specs
|
||||
|
||||
specs = spack.cmd.parse_specs(args.spec or args.spec_file)
|
||||
|
||||
if specs:
|
||||
specs = _matching_specs(specs)
|
||||
if specs_arg:
|
||||
specs = _matching_specs(spack.cmd.parse_specs(specs_arg))
|
||||
else:
|
||||
specs = spack.cmd.require_active_env("buildcache check").all_specs()
|
||||
|
||||
|
@@ -16,6 +16,7 @@
|
||||
import spack.cmd.buildcache as buildcache
|
||||
import spack.config as cfg
|
||||
import spack.environment as ev
|
||||
import spack.environment.depfile
|
||||
import spack.hash_types as ht
|
||||
import spack.mirror
|
||||
import spack.util.gpg as gpg_util
|
||||
@@ -606,7 +607,9 @@ def ci_rebuild(args):
|
||||
"SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
|
||||
"-j$(nproc)",
|
||||
"install-deps/{}".format(
|
||||
ev.depfile.MakefileSpec(job_spec).safe_format("{name}-{version}-{hash}")
|
||||
spack.environment.depfile.MakefileSpec(job_spec).safe_format(
|
||||
"{name}-{version}-{hash}"
|
||||
)
|
||||
),
|
||||
],
|
||||
spack_cmd + ["install"] + root_install_args,
|
||||
|
@@ -67,12 +67,13 @@ class ConstraintAction(argparse.Action):
|
||||
|
||||
def __call__(self, parser, namespace, values, option_string=None):
|
||||
# Query specs from command line
|
||||
self.values = values
|
||||
namespace.constraint = values
|
||||
self.constraint = namespace.constraint = values
|
||||
self.constraint_specs = namespace.constraint_specs = []
|
||||
namespace.specs = self._specs
|
||||
|
||||
def _specs(self, **kwargs):
|
||||
qspecs = spack.cmd.parse_specs(self.values)
|
||||
# store parsed specs in spec.constraint after a call to specs()
|
||||
self.constraint_specs[:] = spack.cmd.parse_specs(self.constraint)
|
||||
|
||||
# If an environment is provided, we'll restrict the search to
|
||||
# only its installed packages.
|
||||
@@ -81,12 +82,12 @@ def _specs(self, **kwargs):
|
||||
kwargs["hashes"] = set(env.all_hashes())
|
||||
|
||||
# return everything for an empty query.
|
||||
if not qspecs:
|
||||
if not self.constraint_specs:
|
||||
return spack.store.STORE.db.query(**kwargs)
|
||||
|
||||
# Return only matching stuff otherwise.
|
||||
specs = {}
|
||||
for spec in qspecs:
|
||||
for spec in self.constraint_specs:
|
||||
for s in spack.store.STORE.db.query(spec, **kwargs):
|
||||
# This is fast for already-concrete specs
|
||||
specs[s.dag_hash()] = s
|
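The refactor keeps the raw constraint on both the action and the namespace and defers parsing until `namespace.specs()` is actually invoked. A runnable toy version of that deferred-parsing pattern, with `str.strip` standing in for `spack.cmd.parse_specs`:

```python
import argparse

class ConstraintActionSketch(argparse.Action):
    def __call__(self, parser, namespace, values, option_string=None):
        self.constraint = namespace.constraint = values
        self.constraint_specs = namespace.constraint_specs = []
        namespace.specs = self._specs  # parsing happens later, on demand

    def _specs(self, **kwargs):
        # Stand-in for spack.cmd.parse_specs(self.constraint).
        self.constraint_specs[:] = [v.strip() for v in self.constraint]
        return self.constraint_specs

parser = argparse.ArgumentParser()
parser.add_argument("constraint", nargs="*", action=ConstraintActionSketch)
ns = parser.parse_args([" zlib@1.3 "])
assert ns.specs() == ["zlib@1.3"]
```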
||||
@@ -384,10 +385,11 @@ def install_status():
|
||||
"--install-status",
|
||||
action="store_true",
|
||||
default=True,
|
||||
help="show install status of packages\n\npackages can be: "
|
||||
"installed [+], missing and needed by an installed package [-], "
|
||||
"installed in an upstream instance [^], "
|
||||
"or not installed (no annotation)",
|
||||
help=(
|
||||
"show install status of packages\n"
|
||||
"[+] installed [^] installed in an upstream\n"
|
||||
" - not installed [-] missing dep of installed package\n"
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
@@ -64,20 +64,14 @@ def setup_parser(subparser):
|
||||
# List
|
||||
list_parser = sp.add_parser("list", help="list available compilers")
|
||||
list_parser.add_argument(
|
||||
"--scope",
|
||||
action=arguments.ConfigScope,
|
||||
default=lambda: spack.config.default_list_scope(),
|
||||
help="configuration scope to read from",
|
||||
"--scope", action=arguments.ConfigScope, help="configuration scope to read from"
|
||||
)
|
||||
|
||||
# Info
|
||||
info_parser = sp.add_parser("info", help="show compiler paths")
|
||||
info_parser.add_argument("compiler_spec")
|
||||
info_parser.add_argument(
|
||||
"--scope",
|
||||
action=arguments.ConfigScope,
|
||||
default=lambda: spack.config.default_list_scope(),
|
||||
help="configuration scope to read from",
|
||||
"--scope", action=arguments.ConfigScope, help="configuration scope to read from"
|
||||
)
|
||||
|
||||
|
||||
|
@@ -5,6 +5,7 @@
|
||||
import collections
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
from typing import List
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
@@ -48,6 +49,7 @@ def setup_parser(subparser):
|
||||
blame_parser.add_argument(
|
||||
"section",
|
||||
help="configuration section to print\n\noptions: %(choices)s",
|
||||
nargs="?",
|
||||
metavar="section",
|
||||
choices=spack.config.SECTION_SCHEMAS,
|
||||
)
|
||||
@@ -131,32 +133,50 @@ def _get_scope_and_section(args):
|
||||
return scope, section
|
||||
|
||||
|
||||
def print_configuration(args, *, blame: bool) -> None:
|
||||
if args.scope and args.section is None:
|
||||
tty.die(f"the argument --scope={args.scope} requires specifying a section.")
|
||||
|
||||
if args.section is not None:
|
||||
spack.config.CONFIG.print_section(args.section, blame=blame, scope=args.scope)
|
||||
return
|
||||
|
||||
print_flattened_configuration(blame=blame)
|
||||
|
||||
|
||||
def print_flattened_configuration(*, blame: bool) -> None:
|
||||
"""Prints to stdout a flattened version of the configuration.
|
||||
|
||||
Args:
|
||||
blame: if True, shows file provenance for each entry in the configuration.
|
||||
"""
|
||||
env = ev.active_environment()
|
||||
if env is not None:
|
||||
pristine = env.manifest.pristine_yaml_content
|
||||
flattened = pristine.copy()
|
||||
flattened[spack.schema.env.TOP_LEVEL_KEY] = pristine[spack.schema.env.TOP_LEVEL_KEY].copy()
|
||||
else:
|
||||
flattened = syaml.syaml_dict()
|
||||
flattened[spack.schema.env.TOP_LEVEL_KEY] = syaml.syaml_dict()
|
||||
|
||||
for config_section in spack.config.SECTION_SCHEMAS:
|
||||
current = spack.config.get(config_section)
|
||||
flattened[spack.schema.env.TOP_LEVEL_KEY][config_section] = current
|
||||
syaml.dump_config(flattened, stream=sys.stdout, default_flow_style=False, blame=blame)
|
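`print_flattened_configuration` merges every section under the single top-level `spack:` key, starting from the active environment's manifest when there is one. A stripped-down sketch using plain dicts in place of `syaml`:

```python
# Plain-dict sketch; the real code uses syaml dicts and syaml.dump_config.
TOP_LEVEL_KEY = "spack"

def get_merged_section(section: str) -> dict:  # stand-in for spack.config.get
    return {"config": {"build_jobs": 8}, "packages": {"all": {"target": ["x86_64"]}}}[section]

flattened = {TOP_LEVEL_KEY: {}}
for config_section in ("config", "packages"):
    flattened[TOP_LEVEL_KEY][config_section] = get_merged_section(config_section)

assert flattened["spack"]["config"]["build_jobs"] == 8
```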
||||
|
||||
|
||||
def config_get(args):
|
||||
"""Dump merged YAML configuration for a specific section.
|
||||
|
||||
With no arguments and an active environment, print the contents of
|
||||
the environment's manifest file (spack.yaml).
|
||||
"""
|
||||
scope, section = _get_scope_and_section(args)
|
||||
|
||||
if section is not None:
|
||||
spack.config.CONFIG.print_section(section)
|
||||
|
||||
elif scope and scope.startswith("env:"):
|
||||
config_file = spack.config.CONFIG.get_config_filename(scope, section)
|
||||
if os.path.exists(config_file):
|
||||
with open(config_file) as f:
|
||||
print(f.read())
|
||||
else:
|
||||
tty.die("environment has no %s file" % ev.manifest_name)
|
||||
|
||||
else:
|
||||
tty.die("`spack config get` requires a section argument or an active environment.")
|
||||
print_configuration(args, blame=False)
|
||||
|
||||
|
||||
def config_blame(args):
|
||||
"""Print out line-by-line blame of merged YAML."""
|
||||
spack.config.CONFIG.print_section(args.section, blame=True)
|
||||
print_configuration(args, blame=True)
|
||||
|
||||
|
||||
def config_edit(args):
|
||||
|
@@ -45,10 +45,41 @@ def setup_parser(subparser):
|
||||
arguments.add_common_arguments(subparser, ["spec"])
|
||||
|
||||
|
||||
def develop(parser, args):
|
||||
env = spack.cmd.require_active_env(cmd_name="develop")
|
||||
def _update_config(spec, path):
|
||||
find_fn = lambda section: spec.name in section
|
||||
|
||||
entry = {"spec": str(spec)}
|
||||
if path != spec.name:
|
||||
entry["path"] = path
|
||||
|
||||
def change_fn(section):
|
||||
section[spec.name] = entry
|
||||
|
||||
spack.config.change_or_add("develop", find_fn, change_fn)
|
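`spack.config.change_or_add` takes a predicate to locate the right config entry and a mutator to apply there. A toy model of that find/change contract over a list of sections (the real function walks configuration scopes, so treat this purely as an illustration):

```python
def change_or_add(sections, find_fn, change_fn):
    # Toy model: mutate the first section the predicate matches, else add one.
    for section in sections:
        if find_fn(section):
            change_fn(section)
            return
    fresh = {}
    change_fn(fresh)
    sections.append(fresh)

sections = [{"other-pkg": {"spec": "other-pkg@main"}}]
entry = {"spec": "mypkg@=develop", "path": "code/mypkg"}  # hypothetical develop entry
change_or_add(sections, lambda s: "mypkg" in s, lambda s: s.update(mypkg=entry))
assert sections[1]["mypkg"]["path"] == "code/mypkg"
```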
||||
|
||||
|
||||
def _retrieve_develop_source(spec, abspath):
|
||||
# "steal" the source code via staging API. We ask for a stage
|
||||
# to be created, then copy it afterwards somewhere else. It would be
|
||||
# better if we could create the `source_path` directly in its final
|
||||
# destination.
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
||||
# We construct a package class ourselves, rather than asking for
|
||||
# Spec.package, since Spec only allows this when it is concrete
|
||||
package = pkg_cls(spec)
|
||||
if isinstance(package.stage[0].fetcher, spack.fetch_strategy.GitFetchStrategy):
|
||||
package.stage[0].fetcher.get_full_repo = True
|
||||
# If we retrieved this version before and cached it, we may have
|
||||
# done so without cloning the full git repo; likewise, any
|
||||
# mirror might store an instance with truncated history.
|
||||
package.stage[0].disable_mirrors()
|
||||
|
||||
package.stage.steal_source(abspath)
|
||||
|
||||
|
||||
def develop(parser, args):
|
||||
if not args.spec:
|
||||
env = spack.cmd.require_active_env(cmd_name="develop")
|
||||
if args.clone is False:
|
||||
raise SpackError("No spec provided to spack develop command")
|
||||
|
||||
@@ -66,7 +97,7 @@ def develop(parser, args):
|
||||
# Both old syntax `spack develop pkg@x` and new syntax `spack develop pkg@=x`
|
||||
# are currently supported.
|
||||
spec = spack.spec.parse_with_version_concrete(entry["spec"])
|
||||
env.develop(spec=spec, path=path, clone=True)
|
||||
_retrieve_develop_source(spec, abspath)
|
||||
|
||||
if not env.dev_specs:
|
||||
tty.warn("No develop specs to download")
|
||||
@@ -81,12 +112,16 @@ def develop(parser, args):
|
||||
version = spec.versions.concrete_range_as_version
|
||||
if not version:
|
||||
raise SpackError("Packages to develop must have a concrete version")
|
||||
|
||||
spec.versions = spack.version.VersionList([version])
|
||||
|
||||
# default path is relative path to spec.name
|
||||
# If user does not specify --path, we choose to create a directory in the
|
||||
# active environment's directory, named after the spec
|
||||
path = args.path or spec.name
|
||||
abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
|
||||
if not os.path.isabs(path):
|
||||
env = spack.cmd.require_active_env(cmd_name="develop")
|
||||
abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
|
||||
else:
|
||||
abspath = path
|
||||
|
||||
# clone default: only if the path doesn't exist
|
||||
clone = args.clone
|
||||
@@ -96,15 +131,24 @@ def develop(parser, args):
|
||||
if not clone and not os.path.exists(abspath):
|
||||
raise SpackError("Provided path %s does not exist" % abspath)
|
||||
|
||||
if clone and os.path.exists(abspath):
|
||||
if args.force:
|
||||
shutil.rmtree(abspath)
|
||||
else:
|
||||
msg = "Path %s already exists and cannot be cloned to." % abspath
|
||||
msg += " Use `spack develop -f` to overwrite."
|
||||
raise SpackError(msg)
|
||||
if clone:
|
||||
if os.path.exists(abspath):
|
||||
if args.force:
|
||||
shutil.rmtree(abspath)
|
||||
else:
|
||||
msg = "Path %s already exists and cannot be cloned to." % abspath
|
||||
msg += " Use `spack develop -f` to overwrite."
|
||||
raise SpackError(msg)
|
||||
|
||||
_retrieve_develop_source(spec, abspath)
|
||||
|
||||
# Note: we could put develop specs in any scope, but I assume
|
||||
# users would only ever want to do this for either (a) an active
|
||||
# env or (b) a specified config file (e.g. that is included by
|
||||
# an environment)
|
||||
# TODO: when https://github.com/spack/spack/pull/35307 is merged,
|
||||
# an active env is not required if a scope is specified
|
||||
env = spack.cmd.require_active_env(cmd_name="develop")
|
||||
tty.debug("Updating develop config for {0} transactionally".format(env.name))
|
||||
with env.write_transaction():
|
||||
changed = env.develop(spec, path, clone)
|
||||
if changed:
|
||||
env.write()
|
||||
_update_config(spec, path)
|
||||
|
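The `_update_config` helper above follows the find_fn/change_fn callback contract expected by `spack.config.change_or_add`. A minimal standalone sketch of that contract, with a hypothetical package name, spec string, and path that are not part of the diff:

import spack.config

def add_develop_entry(name, spec_str, path):
    # find_fn: does a scope's "develop" section already mention this package?
    find_fn = lambda section: name in section

    # change_fn: mutate the section chosen by change_or_add in place;
    # change_or_add then writes that section back to the selected scope.
    def change_fn(section):
        section[name] = {"spec": spec_str, "path": path}

    spack.config.change_or_add("develop", find_fn, change_fn)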
@@ -44,6 +44,9 @@ def setup_parser(subparser):
        action="append",
        help="select the attributes to show (defaults to all)",
    )
    subparser.add_argument(
        "--ignore", action="append", help="omit diffs related to these dependencies"
    )


def shift(asp_function):
@@ -54,7 +57,7 @@ def shift(asp_function):
    return asp.AspFunction(first, rest)


def compare_specs(a, b, to_string=False, color=None):
def compare_specs(a, b, to_string=False, color=None, ignore_packages=None):
    """
    Generate a comparison, including diffs (for each side) and an intersection.

@@ -73,6 +76,14 @@ def compare_specs(a, b, to_string=False, color=None):
    if color is None:
        color = get_color_when()

    a = a.copy()
    b = b.copy()

    if ignore_packages:
        for pkg_name in ignore_packages:
            a.trim(pkg_name)
            b.trim(pkg_name)

    # Prepare a solver setup to parse differences
    setup = asp.SpackSolverSetup()

@@ -209,7 +220,7 @@ def diff(parser, args):

    # Calculate the comparison (c)
    color = False if args.dump_json else get_color_when()
    c = compare_specs(specs[0], specs[1], to_string=True, color=color)
    c = compare_specs(specs[0], specs[1], to_string=True, color=color, ignore_packages=args.ignore)

    # Default to all attributes
    attributes = args.attribute or ["all"]
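A hedged usage sketch for the new ignore_packages parameter. The spec names are illustrative, and compare_specs is assumed to return its usual comparison dict with an "a_not_b" entry:

import spack.cmd
from spack.cmd.diff import compare_specs

# Two hypothetical concrete specs; zlib facts are excluded from the diff.
a, b = spack.cmd.parse_specs(["hdf5+mpi", "hdf5~mpi"], concretize=True)
c = compare_specs(a, b, to_string=True, ignore_packages=["zlib"])
print(c["a_not_b"])  # zlib-related differences were trimmed from both copies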
@@ -261,10 +261,8 @@ def find(parser, args):

    # Exit early with an error code if no package matches the constraint
    if not results and args.constraint:
        msg = "No package matches the query: {0}"
        msg = msg.format(" ".join(args.constraint))
        tty.msg(msg)
        raise SystemExit(1)
        constraint_str = " ".join(str(s) for s in args.constraint_specs)
        tty.die(f"No package matches the query: {constraint_str}")

    # If tags have been specified on the command line, filter by tags
    if args.tags:
@@ -327,7 +327,7 @@ def _variants_by_name_when(pkg):
    """Adaptor to get variants keyed by { name: { when: { [Variant...] } }."""
    # TODO: replace with pkg.variants_by_name(when=True) when unified directive dicts are merged.
    variants = {}
    for name, (variant, whens) in pkg.variants.items():
    for name, (variant, whens) in sorted(pkg.variants.items()):
        for when in whens:
            variants.setdefault(name, {}).setdefault(when, []).append(variant)
    return variants
@@ -162,8 +162,8 @@ def setup_parser(subparser):
        "--no-check-signature",
        action="store_true",
        dest="unsigned",
        default=False,
        help="do not check signatures of binary packages",
        default=None,
        help="do not check signatures of binary packages (override mirror config)",
    )
    subparser.add_argument(
        "--show-log-on-error",
@@ -98,15 +98,15 @@ def load(parser, args):
        spack.cmd.display_specs(results)
        return

    constraint_specs = spack.cmd.parse_specs(args.constraint)
    specs = [
        spack.cmd.disambiguate_spec(spec, env, first=args.load_first)
        for spec in spack.cmd.parse_specs(args.constraint)
        spack.cmd.disambiguate_spec(spec, env, first=args.load_first) for spec in constraint_specs
    ]

    if not args.shell:
        specs_str = " ".join(args.constraint) or "SPECS"
        specs_str = " ".join(str(s) for s in constraint_specs) or "SPECS"
        spack.cmd.common.shell_init_instructions(
            "spack load", " eval `spack load {sh_arg} %s`" % specs_str
            "spack load", f" eval `spack load {{sh_arg}} {specs_str}`"
        )
        return 1
@@ -107,6 +107,23 @@ def setup_parser(subparser):
            "and source use `--type binary --type source` (default)"
        ),
    )
    add_parser_signed = add_parser.add_mutually_exclusive_group(required=False)
    add_parser_signed.add_argument(
        "--unsigned",
        help="do not require signing and signature verification when pushing and installing from "
        "this build cache",
        action="store_false",
        default=None,
        dest="signed",
    )
    add_parser_signed.add_argument(
        "--signed",
        help="require signing and signature verification when pushing and installing from this "
        "build cache",
        action="store_true",
        default=None,
        dest="signed",
    )
    arguments.add_connection_args(add_parser, False)
    # Remove
    remove_parser = sp.add_parser("remove", aliases=["rm"], help=mirror_remove.__doc__)
@@ -157,6 +174,23 @@ def setup_parser(subparser):
        ),
    )
    set_parser.add_argument("--url", help="url of mirror directory from 'spack mirror create'")
    set_parser_unsigned = set_parser.add_mutually_exclusive_group(required=False)
    set_parser_unsigned.add_argument(
        "--unsigned",
        help="do not require signing and signature verification when pushing and installing from "
        "this build cache",
        action="store_false",
        default=None,
        dest="signed",
    )
    set_parser_unsigned.add_argument(
        "--signed",
        help="require signing and signature verification when pushing and installing from this "
        "build cache",
        action="store_true",
        default=None,
        dest="signed",
    )
    set_parser.add_argument(
        "--scope",
        action=arguments.ConfigScope,
@@ -168,10 +202,7 @@ def setup_parser(subparser):
    # List
    list_parser = sp.add_parser("list", help=mirror_list.__doc__)
    list_parser.add_argument(
        "--scope",
        action=arguments.ConfigScope,
        default=lambda: spack.config.default_list_scope(),
        help="configuration scope to read from",
        "--scope", action=arguments.ConfigScope, help="configuration scope to read from"
    )


@@ -186,6 +217,7 @@ def mirror_add(args):
        or args.type
        or args.oci_username
        or args.oci_password
        or args.signed is not None
    ):
        connection = {"url": args.url}
        if args.s3_access_key_id and args.s3_access_key_secret:
@@ -201,6 +233,8 @@ def mirror_add(args):
        if args.type:
            connection["binary"] = "binary" in args.type
            connection["source"] = "source" in args.type
        if args.signed is not None:
            connection["signed"] = args.signed
        mirror = spack.mirror.Mirror(connection, name=args.name)
    else:
        mirror = spack.mirror.Mirror(args.url, name=args.name)
@@ -233,6 +267,8 @@ def _configure_mirror(args):
        changes["endpoint_url"] = args.s3_endpoint_url
    if args.oci_username and args.oci_password:
        changes["access_pair"] = [args.oci_username, args.oci_password]
    if getattr(args, "signed", None) is not None:
        changes["signed"] = args.signed

    # argparse cannot distinguish between --binary and --no-binary when same dest :(
    # notice that set-url does not have these args, so getattr
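The --signed/--unsigned pairing above relies on both flags writing to the same tri-state dest. A self-contained argparse sketch (an illustration of the pattern, not Spack code) of why default=None matters:

import argparse

parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=False)
group.add_argument("--unsigned", action="store_false", default=None, dest="signed")
group.add_argument("--signed", action="store_true", default=None, dest="signed")

assert parser.parse_args([]).signed is None  # flag not given: defer to mirror config
assert parser.parse_args(["--signed"]).signed is True
assert parser.parse_args(["--unsigned"]).signed is False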
@@ -388,21 +388,15 @@ def modules_cmd(parser, args, module_type, callbacks=callbacks):
        callbacks[args.subparser_name](module_type, specs, args)

    except MultipleSpecsMatch:
        msg = "the constraint '{query}' matches multiple packages:\n"
        query = " ".join(str(s) for s in args.constraint_specs)
        msg = f"the constraint '{query}' matches multiple packages:\n"
        for s in specs:
            spec_fmt = "{hash:7} {name}{@version}{%compiler}"
            spec_fmt += "{compiler_flags}{variants}{arch=architecture}"
            msg += "\t" + s.cformat(spec_fmt) + "\n"
        tty.error(msg.format(query=args.constraint))
        tty.die(
            "In this context exactly **one** match is needed: "
            "please specify your constraints better."
        )
        tty.die(msg, "In this context exactly *one* match is needed.")

    except NoSpecMatches:
        msg = "the constraint '{query}' matches no package."
        tty.error(msg.format(query=args.constraint))
        tty.die(
            "In this context exactly **one** match is needed: "
            "please specify your constraints better."
        )
        query = " ".join(str(s) for s in args.constraint_specs)
        msg = f"the constraint '{query}' matches no package."
        tty.die(msg, "In this context exactly *one* match is needed.")
@@ -42,10 +42,7 @@ def setup_parser(subparser):
    # List
    list_parser = sp.add_parser("list", help=repo_list.__doc__)
    list_parser.add_argument(
        "--scope",
        action=arguments.ConfigScope,
        default=lambda: spack.config.default_list_scope(),
        help="configuration scope to read from",
        "--scope", action=arguments.ConfigScope, help="configuration scope to read from"
    )

    # Add
@@ -17,21 +17,51 @@ def setup_parser(subparser):
    subparser.add_argument(
        "-a", "--all", action="store_true", help="remove all specs from (clear) the environment"
    )

    arguments.add_common_arguments(subparser, ["specs"])


def _update_config(specs_to_remove, remove_all=False):
    def change_fn(dev_config):
        modified = False
        for spec in specs_to_remove:
            if spec.name in dev_config:
                tty.msg("Undevelop: removing {0}".format(spec.name))
                del dev_config[spec.name]
                modified = True
        if remove_all and dev_config:
            dev_config.clear()
            modified = True
        return modified

    spack.config.update_all("develop", change_fn)


def undevelop(parser, args):
    env = spack.cmd.require_active_env(cmd_name="undevelop")

    remove_specs = None
    remove_all = False
    if args.all:
        specs = env.dev_specs.keys()
        remove_all = True
    else:
        specs = spack.cmd.parse_specs(args.specs)
        remove_specs = spack.cmd.parse_specs(args.specs)

    # TODO: when https://github.com/spack/spack/pull/35307 is merged,
    # an active env is not required if a scope is specified
    env = spack.cmd.require_active_env(cmd_name="undevelop")
    with env.write_transaction():
        changed = False
        for spec in specs:
            tty.msg("Removing %s from environment %s development specs" % (spec, env.name))
            changed |= env.undevelop(spec)
        if changed:
            env.write()
        _update_config(remove_specs, remove_all)

    updated_all_dev_specs = set(spack.config.get("develop"))
    remove_spec_names = set(x.name for x in remove_specs)

    if remove_all:
        not_fully_removed = updated_all_dev_specs
    else:
        not_fully_removed = updated_all_dev_specs & remove_spec_names

    if not_fully_removed:
        tty.msg(
            "The following specs could not be removed as develop specs"
            " - see `spack config blame develop` to locate files requiring"
            f" manual edits: {', '.join(not_fully_removed)}"
        )
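The change_fn passed to spack.config.update_all above is called once per writable scope and must report whether it modified that scope's section. A minimal sketch of that contract, with a hypothetical package name:

import spack.config

def drop_develop_entry(pkg_name):
    def change_fn(dev_config):
        if pkg_name in dev_config:
            del dev_config[pkg_name]
            return True  # this scope changed and will be written back
        return False  # leave this scope untouched

    spack.config.update_all("develop", change_fn)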
@@ -227,9 +227,7 @@ def unit_test(parser, args, unknown_args):
    # has been used, then test that extension.
    pytest_root = spack.paths.spack_root
    if args.extension:
        target = args.extension
        extensions = spack.extensions.get_extension_paths()
        pytest_root = spack.extensions.path_for_extension(target, *extensions)
        pytest_root = spack.extensions.load_extension(args.extension)

    # pytest.ini lives in the root of the spack repository.
    with llnl.util.filesystem.working_dir(pytest_root):
@@ -20,16 +20,16 @@ def __init__(self, *args, **kwargs):
        self.version_argument = "-V"

    # Subclasses use possible names of C compiler
    cc_names = ["craycc", "cc"]
    cc_names = ["craycc"]

    # Subclasses use possible names of C++ compiler
    cxx_names = ["crayCC", "CC"]
    cxx_names = ["crayCC"]

    # Subclasses use possible names of Fortran 77 compiler
    f77_names = ["crayftn", "ftn"]
    f77_names = ["crayftn"]

    # Subclasses use possible names of Fortran 90 compiler
    fc_names = ["crayftn", "ftn"]
    fc_names = ["crayftn"]

    # MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes.
    suffixes = [r"-mp-\d\.\d"]
@@ -35,12 +35,9 @@
import os
import re
import sys
from contextlib import contextmanager
from typing import Dict, List, Optional, Union
from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Type, Union

import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp, rename
from llnl.util import filesystem, lang, tty

import spack.compilers
import spack.paths
@@ -70,6 +67,7 @@
    "compilers": spack.schema.compilers.schema,
    "concretizer": spack.schema.concretizer.schema,
    "definitions": spack.schema.definitions.schema,
    "develop": spack.schema.develop.schema,
    "mirrors": spack.schema.mirrors.schema,
    "repos": spack.schema.repos.schema,
    "packages": spack.schema.packages.schema,
@@ -113,28 +111,34 @@
#: Base name for the (internal) overrides scope.
_OVERRIDES_BASE_NAME = "overrides-"

#: Type used for raw YAML configuration
YamlConfigDict = Dict[str, Any]

class ConfigScope:
    """This class represents a configuration scope.

    A scope is one directory containing named configuration files.
    Each file is a config "section" (e.g., mirrors, compilers, etc).
    Each file is a config "section" (e.g., mirrors, compilers, etc.).
    """

    def __init__(self, name, path):
    def __init__(self, name, path) -> None:
        self.name = name  # scope name.
        self.path = path  # path to directory containing configs.
        self.sections = syaml.syaml_dict()  # sections read from config files.

    @property
    def is_platform_dependent(self):
    def is_platform_dependent(self) -> bool:
        """Returns true if the scope name is platform specific"""
        return os.sep in self.name

    def get_section_filename(self, section):
    def get_section_filename(self, section: str) -> str:
        """Returns the filename associated with a given section"""
        _validate_section_name(section)
        return os.path.join(self.path, "%s.yaml" % section)
        return os.path.join(self.path, f"{section}.yaml")

    def get_section(self, section):
    def get_section(self, section: str) -> Optional[YamlConfigDict]:
        """Returns the data associated with a given section"""
        if section not in self.sections:
            path = self.get_section_filename(section)
            schema = SECTION_SCHEMAS[section]
@@ -142,39 +146,44 @@ def get_section(self, section):
            self.sections[section] = data
        return self.sections[section]

    def _write_section(self, section):
    def _write_section(self, section: str) -> None:
        filename = self.get_section_filename(section)
        data = self.get_section(section)
        if data is None:
            return

        # We copy data here to avoid adding defaults at write time
        validate_data = copy.deepcopy(data)
        validate(validate_data, SECTION_SCHEMAS[section])

        try:
            mkdirp(self.path)
            filesystem.mkdirp(self.path)
            with open(filename, "w") as f:
                syaml.dump_config(data, stream=f, default_flow_style=False)
        except (syaml.SpackYAMLError, IOError) as e:
        except (syaml.SpackYAMLError, OSError) as e:
            raise ConfigFileError(f"cannot write to '{filename}'") from e

    def clear(self):
    def clear(self) -> None:
        """Empty cached config information."""
        self.sections = syaml.syaml_dict()

    def __repr__(self):
        return "<ConfigScope: %s: %s>" % (self.name, self.path)
    def __repr__(self) -> str:
        return f"<ConfigScope: {self.name}: {self.path}>"

class SingleFileScope(ConfigScope):
    """This class represents a configuration scope in a single YAML file."""

    def __init__(self, name, path, schema, yaml_path=None):
    def __init__(
        self, name: str, path: str, schema: YamlConfigDict, yaml_path: Optional[List[str]] = None
    ) -> None:
        """Similar to ``ConfigScope`` but can be embedded in another schema.

        Arguments:
            schema (dict): jsonschema for the file to read
            yaml_path (list): path in the schema where config data can be
                found.

                If the schema accepts the following yaml data, the yaml_path
                would be ['outer', 'inner']

@@ -186,18 +195,18 @@ def __init__(self, name, path, schema, yaml_path=None):
                   install_tree: $spack/opt/spack
        """
        super().__init__(name, path)
        self._raw_data = None
        self._raw_data: Optional[YamlConfigDict] = None
        self.schema = schema
        self.yaml_path = yaml_path or []

    @property
    def is_platform_dependent(self):
    def is_platform_dependent(self) -> bool:
        return False

    def get_section_filename(self, section):
    def get_section_filename(self, section) -> str:
        return self.path

    def get_section(self, section):
    def get_section(self, section: str) -> Optional[YamlConfigDict]:
        # read raw data from the file, which looks like:
        # {
        #     'config': {
@@ -246,8 +255,8 @@ def get_section(self, section):

        return self.sections.get(section, None)

    def _write_section(self, section):
        data_to_write = self._raw_data
    def _write_section(self, section: str) -> None:
        data_to_write: Optional[YamlConfigDict] = self._raw_data

        # If there is no existing data, this section SingleFileScope has never
        # been written to disk. We need to construct the portion of the data
@@ -277,18 +286,18 @@ def _write_section(self, section):
            validate(data_to_write, self.schema)
            try:
                parent = os.path.dirname(self.path)
                mkdirp(parent)
                filesystem.mkdirp(parent)

                tmp = os.path.join(parent, ".%s.tmp" % os.path.basename(self.path))
                tmp = os.path.join(parent, f".{os.path.basename(self.path)}.tmp")
                with open(tmp, "w") as f:
                    syaml.dump_config(data_to_write, stream=f, default_flow_style=False)
                rename(tmp, self.path)
                filesystem.rename(tmp, self.path)

            except (syaml.SpackYAMLError, IOError) as e:
            except (syaml.SpackYAMLError, OSError) as e:
                raise ConfigFileError(f"cannot write to config file {str(e)}") from e

    def __repr__(self):
        return "<SingleFileScope: %s: %s>" % (self.name, self.path)
    def __repr__(self) -> str:
        return f"<SingleFileScope: {self.name}: {self.path}>"

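As a rough illustration of the yaml_path mechanism documented above, a SingleFileScope for an environment manifest might nest its sections under the manifest's top-level key. The scope name and file path here are hypothetical:

import spack.config
import spack.schema.env

scope = spack.config.SingleFileScope(
    "env:myenv",  # hypothetical scope name
    "/path/to/spack.yaml",  # hypothetical manifest location
    spack.schema.env.schema,
    yaml_path=[spack.schema.env.TOP_LEVEL_KEY],  # sections live under the top-level key
)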
class ImmutableConfigScope(ConfigScope):
@@ -297,11 +306,11 @@ class ImmutableConfigScope(ConfigScope):
    This is used for ConfigScopes passed on the command line.
    """

    def _write_section(self, section):
        raise ConfigError("Cannot write to immutable scope %s" % self)
    def _write_section(self, section) -> None:
        raise ConfigError(f"Cannot write to immutable scope {self}")

    def __repr__(self):
        return "<ImmutableConfigScope: %s: %s>" % (self.name, self.path)
    def __repr__(self) -> str:
        return f"<ImmutableConfigScope: {self.name}: {self.path}>"

class InternalConfigScope(ConfigScope):
@@ -312,56 +321,58 @@ class InternalConfigScope(ConfigScope):
    override settings from files.
    """

    def __init__(self, name, data=None):
    def __init__(self, name: str, data: Optional[YamlConfigDict] = None) -> None:
        super().__init__(name, None)
        self.sections = syaml.syaml_dict()

        if data:
        if data is not None:
            data = InternalConfigScope._process_dict_keyname_overrides(data)
            for section in data:
                dsec = data[section]
                validate({section: dsec}, SECTION_SCHEMAS[section])
                self.sections[section] = _mark_internal(syaml.syaml_dict({section: dsec}), name)

    def get_section_filename(self, section):
    def get_section_filename(self, section: str) -> str:
        raise NotImplementedError("Cannot get filename for InternalConfigScope.")

    def get_section(self, section):
    def get_section(self, section: str) -> Optional[YamlConfigDict]:
        """Just reads from an internal dictionary."""
        if section not in self.sections:
            self.sections[section] = None
        return self.sections[section]

    def _write_section(self, section):
    def _write_section(self, section: str) -> None:
        """This only validates, as the data is already in memory."""
        data = self.get_section(section)
        if data is not None:
            validate(data, SECTION_SCHEMAS[section])
        self.sections[section] = _mark_internal(data, self.name)

    def __repr__(self):
        return "<InternalConfigScope: %s>" % self.name
    def __repr__(self) -> str:
        return f"<InternalConfigScope: {self.name}>"

    def clear(self):
    def clear(self) -> None:
        # no cache to clear here.
        pass

    @staticmethod
    def _process_dict_keyname_overrides(data):
    def _process_dict_keyname_overrides(data: YamlConfigDict) -> YamlConfigDict:
        """Turn a trailing `:' in a key name into an override attribute."""
        result = {}
        # Below we have a lot of type directives, since we hack on types and monkey-patch them
        # by adding attributes that otherwise they won't have.
        result: YamlConfigDict = {}
        for sk, sv in data.items():
            if sk.endswith(":"):
                key = syaml.syaml_str(sk[:-1])
                key.override = True
                key.override = True  # type: ignore[attr-defined]
            elif sk.endswith("+"):
                key = syaml.syaml_str(sk[:-1])
                key.prepend = True
                key.prepend = True  # type: ignore[attr-defined]
            elif sk.endswith("-"):
                key = syaml.syaml_str(sk[:-1])
                key.append = True
                key.append = True  # type: ignore[attr-defined]
            else:
                key = sk
                key = sk  # type: ignore[assignment]

            if isinstance(sv, dict):
                result[key] = InternalConfigScope._process_dict_keyname_overrides(sv)
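A small sketch of the trailing-marker handling implemented above; assuming Spack's syaml-backed strings, a key ending in ':' comes out flagged as an override:

from spack.config import InternalConfigScope

data = {"config:": {"verify_ssl": False}}
processed = InternalConfigScope._process_dict_keyname_overrides(data)
key = next(iter(processed))
assert key == "config" and getattr(key, "override", False)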
@@ -394,7 +405,7 @@ class Configuration:
    # convert to typing.OrderedDict when we drop 3.6, or OrderedDict when we reach 3.9
    scopes: Dict[str, ConfigScope]

    def __init__(self, *scopes: ConfigScope):
    def __init__(self, *scopes: ConfigScope) -> None:
        """Initialize a configuration with an initial list of scopes.

        Args:
@@ -405,26 +416,26 @@ def __init__(self, *scopes: ConfigScope):
        self.scopes = collections.OrderedDict()
        for scope in scopes:
            self.push_scope(scope)
        self.format_updates: Dict[str, List[str]] = collections.defaultdict(list)
        self.format_updates: Dict[str, List[ConfigScope]] = collections.defaultdict(list)

    @_config_mutator
    def push_scope(self, scope: ConfigScope):
    def push_scope(self, scope: ConfigScope) -> None:
        """Add a higher precedence scope to the Configuration."""
        tty.debug("[CONFIGURATION: PUSH SCOPE]: {}".format(str(scope)), level=2)
        tty.debug(f"[CONFIGURATION: PUSH SCOPE]: {str(scope)}", level=2)
        self.scopes[scope.name] = scope

    @_config_mutator
    def pop_scope(self) -> ConfigScope:
        """Remove the highest precedence scope and return it."""
        name, scope = self.scopes.popitem(last=True)  # type: ignore[call-arg]
        tty.debug("[CONFIGURATION: POP SCOPE]: {}".format(str(scope)), level=2)
        tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
        return scope

    @_config_mutator
    def remove_scope(self, scope_name: str) -> Optional[ConfigScope]:
        """Remove scope by name; has no effect when ``scope_name`` does not exist"""
        scope = self.scopes.pop(scope_name, None)
        tty.debug("[CONFIGURATION: POP SCOPE]: {}".format(str(scope)), level=2)
        tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
        return scope

    @property
@@ -481,16 +492,16 @@ def _validate_scope(self, scope: Optional[str]) -> ConfigScope:

        else:
            raise ValueError(
                "Invalid config scope: '%s'. Must be one of %s" % (scope, self.scopes.keys())
                f"Invalid config scope: '{scope}'. Must be one of {self.scopes.keys()}"
            )

    def get_config_filename(self, scope, section) -> str:
    def get_config_filename(self, scope: str, section: str) -> str:
        """For some scope and section, get the name of the configuration file."""
        scope = self._validate_scope(scope)
        return scope.get_section_filename(section)

    @_config_mutator
    def clear_caches(self):
    def clear_caches(self) -> None:
        """Clears the caches for configuration files,

        This will cause files to be re-read upon the next request."""
@@ -500,7 +511,7 @@ def clear_caches(self):
    @_config_mutator
    def update_config(
        self, section: str, update_data: Dict, scope: Optional[str] = None, force: bool = False
    ):
    ) -> None:
        """Update the configuration file for a particular scope.

        Overwrites contents of a section in a scope with update_data,
@@ -514,10 +525,10 @@ def update_config(
        format will fail to update unless ``force`` is True.

        Args:
            section (str): section of the configuration to be updated
            update_data (dict): data to be used for the update
            scope (str): scope to be updated
            force (str): force the update
            section: section of the configuration to be updated
            update_data: data to be used for the update
            scope: scope to be updated
            force: force the update
        """
        if self.format_updates.get(section) and not force:
            msg = (
@@ -546,7 +557,7 @@ def update_config(

        scope._write_section(section)

    def get_config(self, section, scope=None):
    def get_config(self, section: str, scope: Optional[str] = None) -> YamlConfigDict:
        """Get configuration settings for a section.

        If ``scope`` is ``None`` or not provided, return the merged contents
@@ -573,12 +584,12 @@ def get_config(self, section, scope=None):
        """
        return self._get_config_memoized(section, scope)

    @llnl.util.lang.memoized
    def _get_config_memoized(self, section, scope):
    @lang.memoized
    def _get_config_memoized(self, section: str, scope: Optional[str]) -> YamlConfigDict:
        _validate_section_name(section)

        if scope is None:
            scopes = self.scopes.values()
            scopes = list(self.scopes.values())
        else:
            scopes = [self._validate_scope(scope)]

@@ -613,7 +624,7 @@ def _get_config_memoized(self, section, scope):
        ret = syaml.syaml_dict(ret)
        return ret

    def get(self, path, default=None, scope=None):
    def get(self, path: str, default: Optional[Any] = None, scope: Optional[str] = None) -> Any:
        """Get a config section or a single value from one.

        Accepts a path syntax that allows us to grab nested config map
@@ -644,7 +655,7 @@ def get(self, path, default=None, scope=None):
        return value

    @_config_mutator
    def set(self, path, value, scope=None):
    def set(self, path: str, value: Any, scope: Optional[str] = None) -> None:
        """Convenience function for setting single values in config files.

        Accepts the path syntax described in ``get()``.
@@ -686,21 +697,22 @@ def set(self, path, value, scope=None):

    def __iter__(self):
        """Iterate over scopes in this configuration."""
        for scope in self.scopes.values():
            yield scope
        yield from self.scopes.values()

    def print_section(self, section, blame=False):
    def print_section(self, section: str, blame: bool = False, *, scope=None) -> None:
        """Print a configuration to stdout."""
        try:
            data = syaml.syaml_dict()
            data[section] = self.get_config(section)
            data[section] = self.get_config(section, scope=scope)
            syaml.dump_config(data, stream=sys.stdout, default_flow_style=False, blame=blame)
        except (syaml.SpackYAMLError, IOError) as e:
        except (syaml.SpackYAMLError, OSError) as e:
            raise ConfigError(f"cannot read '{section}' configuration") from e

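A hedged example of the colon-separated path syntax accepted by get() and set() above; the keys are standard Spack config entries and the values are illustrative:

import spack.config

jobs = spack.config.get("config:build_jobs", default=16)  # nested lookup
spack.config.set("config:build_jobs", 8, scope="user")  # single-value write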
@contextmanager
def override(path_or_scope, value=None):
@contextlib.contextmanager
def override(
    path_or_scope: Union[ConfigScope, str], value: Optional[Any] = None
) -> Generator[Union[lang.Singleton, Configuration], None, None]:
    """Simple way to override config settings within a context.

    Arguments:
@@ -718,10 +730,10 @@ def override(path_or_scope, value=None):
    else:
        base_name = _OVERRIDES_BASE_NAME
        # Ensure the new override gets a unique scope name
        current_overrides = [s.name for s in CONFIG.matching_scopes(r"^{0}".format(base_name))]
        current_overrides = [s.name for s in CONFIG.matching_scopes(rf"^{base_name}")]
        num_overrides = len(current_overrides)
        while True:
            scope_name = "{0}{1}".format(base_name, num_overrides)
            scope_name = f"{base_name}{num_overrides}"
            if scope_name in current_overrides:
                num_overrides += 1
            else:
@@ -738,12 +750,13 @@ def override(path_or_scope, value=None):
        assert scope is overrides


#: configuration scopes added on the command line
#: set by ``spack.main.main()``.
#: configuration scopes added on the command line set by ``spack.main.main()``
COMMAND_LINE_SCOPES: List[str] = []

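Usage sketch for the override() context manager defined above: the injected setting only lives for the duration of the with-block, after which the override scope is popped again.

import spack.config

with spack.config.override("config:build_jobs", 1):
    assert spack.config.get("config:build_jobs") == 1
# the previous value is back in effect here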
def _add_platform_scope(cfg, scope_type, name, path):
def _add_platform_scope(
    cfg: Union[Configuration, lang.Singleton], scope_type: Type[ConfigScope], name: str, path: str
) -> None:
    """Add a platform-specific subdirectory for the current platform."""
    platform = spack.platforms.host().name
    plat_name = os.path.join(name, platform)
@@ -751,7 +764,9 @@ def _add_platform_scope(cfg, scope_type, name, path):
    cfg.push_scope(scope_type(plat_name, plat_path))


def _add_command_line_scopes(cfg, command_line_scopes):
def _add_command_line_scopes(
    cfg: Union[Configuration, lang.Singleton], command_line_scopes: List[str]
) -> None:
    """Add additional scopes from the --config-scope argument.

    Command line scopes are named after their position in the arg list.
@@ -760,26 +775,22 @@ def _add_command_line_scopes(cfg, command_line_scopes):
        # We ensure that these scopes exist and are readable, as they are
        # provided on the command line by the user.
        if not os.path.isdir(path):
            raise ConfigError("config scope is not a directory: '%s'" % path)
            raise ConfigError(f"config scope is not a directory: '{path}'")
        elif not os.access(path, os.R_OK):
            raise ConfigError("config scope is not readable: '%s'" % path)
            raise ConfigError(f"config scope is not readable: '{path}'")

        # name based on order on the command line
        name = "cmd_scope_%d" % i
        name = f"cmd_scope_{i:d}"
        cfg.push_scope(ImmutableConfigScope(name, path))
        _add_platform_scope(cfg, ImmutableConfigScope, name, path)


def create():
def create() -> Configuration:
    """Singleton Configuration instance.

    This constructs one instance associated with this module and returns
    it. It is bundled inside a function so that configuration can be
    initialized lazily.

    Return:
        (Configuration): object for accessing spack configuration

    """
    cfg = Configuration()

@@ -828,16 +839,25 @@ def create():


#: This is the singleton configuration instance for Spack.
CONFIG: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)
CONFIG: Union[Configuration, lang.Singleton] = lang.Singleton(create)


def add_from_file(filename, scope=None):
def add_from_file(filename: str, scope: Optional[str] = None) -> None:
    """Add updates to a config from a filename"""
    # Extract internal attributes, if we are dealing with an environment
    data = read_config_file(filename)
    if data is None:
        return

    if spack.schema.env.TOP_LEVEL_KEY in data:
        data = data[spack.schema.env.TOP_LEVEL_KEY]

    msg = (
        "unexpected 'None' value when retrieving configuration. "
        "Please submit a bug-report at https://github.com/spack/spack/issues"
    )
    assert data is not None, msg

    # update all sections from config dict
    # We have to iterate on keys to keep overrides from the file
    for section in data.keys():
@@ -855,7 +875,7 @@ def add_from_file(filename, scope=None):
            CONFIG.set(section, new, scope)


def add(fullpath, scope=None):
def add(fullpath: str, scope: Optional[str] = None) -> None:
    """Add the given configuration to the specified config scope.
    Add accepts a path. If you want to add from a filename, use add_from_file"""
    components = process_config_path(fullpath)
@@ -903,12 +923,12 @@ def add(fullpath, scope=None):
    CONFIG.set(path, new, scope)


def get(path, default=None, scope=None):
def get(path: str, default: Optional[Any] = None, scope: Optional[str] = None) -> Any:
    """Module-level wrapper for ``Configuration.get()``."""
    return CONFIG.get(path, default, scope)


def set(path, value, scope=None):
def set(path: str, value: Any, scope: Optional[str] = None) -> None:
    """Convenience function for setting single values in config files.

    Accepts the path syntax described in ``get()``.
@@ -916,32 +936,113 @@ def set(path, value, scope=None):
    return CONFIG.set(path, value, scope)


def add_default_platform_scope(platform):
def add_default_platform_scope(platform: str) -> None:
    plat_name = os.path.join("defaults", platform)
    plat_path = os.path.join(CONFIGURATION_DEFAULTS_PATH[1], platform)
    CONFIG.push_scope(ConfigScope(plat_name, plat_path))


def scopes():
def scopes() -> Dict[str, ConfigScope]:
    """Convenience function to get list of configuration scopes."""
    return CONFIG.scopes


def _validate_section_name(section):
def writable_scopes() -> List[ConfigScope]:
    """
    Return list of writable scopes
    """
    return list(
        reversed(
            list(
                x
                for x in CONFIG.scopes.values()
                if not isinstance(x, (InternalConfigScope, ImmutableConfigScope))
            )
        )
    )


def writable_scope_names() -> List[str]:
    return list(x.name for x in writable_scopes())


def matched_config(cfg_path: str) -> List[Tuple[str, Any]]:
    return [(scope, get(cfg_path, scope=scope)) for scope in writable_scope_names()]


def change_or_add(
    section_name: str, find_fn: Callable[[str], bool], update_fn: Callable[[str], None]
) -> None:
    """Change or add a subsection of config, with additional logic to
    select a reasonable scope where the change is applied.

    Search through config scopes starting with the highest priority:
    the first one matching a criterion (determined by ``find_fn``) is updated;
    if no such config exists, find the first config scope that defines
    any config for the named section; if no scopes define any related
    config, then update the highest-priority config scope.
    """
    configs_by_section = matched_config(section_name)

    found = False
    for scope, section in configs_by_section:
        found = find_fn(section)
        if found:
            break

    if found:
        update_fn(section)
        spack.config.set(section_name, section, scope=scope)
        return

    # If no scope meets the criteria specified by ``find_fn``,
    # then look for a scope that has any content (for the specified
    # section name)
    for scope, section in configs_by_section:
        if section:
            update_fn(section)
            found = True
            break

    if found:
        spack.config.set(section_name, section, scope=scope)
        return

    # If no scopes define any config for the named section, then
    # modify the highest-priority scope.
    scope, section = configs_by_section[0]
    update_fn(section)
    spack.config.set(section_name, section, scope=scope)


def update_all(section_name: str, change_fn: Callable[[str], bool]) -> None:
    """Change a config section, which may have details duplicated
    across multiple scopes.
    """
    configs_by_section = matched_config("develop")

    for scope, section in configs_by_section:
        modified = change_fn(section)
        if modified:
            spack.config.set(section_name, section, scope=scope)


def _validate_section_name(section: str) -> None:
    """Exit if the section is not a valid section."""
    if section not in SECTION_SCHEMAS:
        raise ConfigSectionError(
            "Invalid config section: '%s'. Options are: %s"
            % (section, " ".join(SECTION_SCHEMAS.keys()))
            f"Invalid config section: '{section}'. Options are: {' '.join(SECTION_SCHEMAS.keys())}"
        )

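matched_config() above is the scope-scanning primitive both new helpers build on: it pairs each writable scope name with that scope's view of a config path, highest-precedence scope first. A small illustrative loop (actual scope names vary by installation):

import spack.config

for scope_name, develop_section in spack.config.matched_config("develop"):
    print(scope_name, sorted(develop_section or {}))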
def validate(data, schema, filename=None):
def validate(
    data: YamlConfigDict, schema: YamlConfigDict, filename: Optional[str] = None
) -> YamlConfigDict:
    """Validate data read in from a Spack YAML file.

    Arguments:
        data (dict or list): data read from a Spack YAML file
        schema (dict or list): jsonschema to validate data
        data: data read from a Spack YAML file
        schema: jsonschema to validate data

    This leverages the line information (start_mark, end_mark) stored
    on Spack YAML structures.
@@ -964,7 +1065,9 @@ def validate(data, schema, filename=None):
    return test_data


def read_config_file(filename, schema=None):
def read_config_file(
    filename: str, schema: Optional[YamlConfigDict] = None
) -> Optional[YamlConfigDict]:
    """Read a YAML configuration file.

    User can provide a schema for validation. If no schema is provided,
@@ -976,17 +1079,17 @@ def read_config_file(filename, schema=None):

    if not os.path.exists(filename):
        # Ignore nonexistent files.
        tty.debug("Skipping nonexistent config path {0}".format(filename), level=3)
        tty.debug(f"Skipping nonexistent config path {filename}", level=3)
        return None

    elif not os.path.isfile(filename):
        raise ConfigFileError("Invalid configuration. %s exists but is not a file." % filename)
        raise ConfigFileError(f"Invalid configuration. {filename} exists but is not a file.")

    elif not os.access(filename, os.R_OK):
        raise ConfigFileError("Config file is not readable: {0}".format(filename))
        raise ConfigFileError(f"Config file is not readable: {filename}")

    try:
        tty.debug("Reading config from file {0}".format(filename))
        tty.debug(f"Reading config from file {filename}")
        with open(filename) as f:
            data = syaml.load_config(f)

@@ -1004,11 +1107,11 @@ def read_config_file(filename, schema=None):
    except syaml.SpackYAMLError as e:
        raise ConfigFileError(str(e)) from e

    except IOError as e:
    except OSError as e:
        raise ConfigFileError(f"Error reading configuration file {filename}: {str(e)}") from e

def _override(string):
def _override(string: str) -> bool:
    """Test if a spack YAML string is an override.

    See ``spack_yaml`` for details. Keys in Spack YAML can end in `::`,
@@ -1019,7 +1122,7 @@ def _override(string):
    return hasattr(string, "override") and string.override


def _append(string):
def _append(string: str) -> bool:
    """Test if a spack YAML string is an append.

    See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
@@ -1033,7 +1136,7 @@ def _append(string):
    return getattr(string, "append", False)


def _prepend(string):
def _prepend(string: str) -> bool:
    """Test if a spack YAML string is a prepend.

    See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
@@ -1105,7 +1208,7 @@ def get_valid_type(path):
        return types[schema_type]()
    else:
        return type(None)
    raise ConfigError("Cannot determine valid type for path '%s'." % path)
    raise ConfigError(f"Cannot determine valid type for path '{path}'.")


def remove_yaml(dest, source):
@@ -1233,7 +1336,7 @@ def they_are(t):
    return copy.copy(source)


def process_config_path(path):
def process_config_path(path: str) -> List[str]:
    """Process a path argument to config.set() that may contain overrides ('::' or
    trailing ':')

@@ -1246,29 +1349,29 @@ def process_config_path(path):
    """
    result = []
    if path.startswith(":"):
        raise syaml.SpackYAMLError("Illegal leading `:' in path `{0}'".format(path), "")
        raise syaml.SpackYAMLError(f"Illegal leading `:' in path `{path}'", "")
    seen_override_in_path = False
    while path:
        front, sep, path = path.partition(":")
        if (sep and not path) or path.startswith(":"):
            if seen_override_in_path:
                raise syaml.SpackYAMLError(
                    "Meaningless second override" " indicator `::' in path `{0}'".format(path), ""
                    f"Meaningless second override indicator `::' in path `{path}'", ""
                )
            path = path.lstrip(":")
            front = syaml.syaml_str(front)
            front.override = True
            front.override = True  # type: ignore[attr-defined]
            seen_override_in_path = True

        elif front.endswith("+"):
            front = front.rstrip("+")
            front = syaml.syaml_str(front)
            front.prepend = True
            front.prepend = True  # type: ignore[attr-defined]

        elif front.endswith("-"):
            front = front.rstrip("-")
            front = syaml.syaml_str(front)
            front.append = True
            front.append = True  # type: ignore[attr-defined]

        result.append(front)
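Based on the loop above, a short sketch of what process_config_path() produces for an override path; the section/key pair is illustrative:

from spack.config import process_config_path

parts = process_config_path("packages::compiler")
assert parts == ["packages", "compiler"]
assert getattr(parts[0], "override", False)  # the '::' marked an override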
@@ -1288,7 +1391,7 @@ def process_config_path(path):
#
# Settings for commands that modify configuration
#
def default_modify_scope(section="config"):
def default_modify_scope(section: str = "config") -> str:
    """Return the config scope that commands should modify by default.

    Commands that modify configuration by default modify the *highest*
@@ -1304,23 +1407,15 @@ def default_modify_scope(section="config"):
    return CONFIG.highest_precedence_non_platform_scope().name


def default_list_scope():
    """Return the config scope that is listed by default.

    Commands that list configuration list *all* scopes (merged) by default.
    """
    return None


def _update_in_memory(data, section):
def _update_in_memory(data: YamlConfigDict, section: str) -> bool:
    """Update the format of the configuration data in memory.

    This function assumes the section is valid (i.e. validation
    is responsibility of the caller)

    Args:
        data (dict): configuration data
        section (str): section of the configuration to update
        data: configuration data
        section: section of the configuration to update

    Returns:
        True if the data was changed, False otherwise
@@ -1330,14 +1425,14 @@ def _update_in_memory(data, section):
    return changed


def ensure_latest_format_fn(section):
def ensure_latest_format_fn(section: str) -> Callable[[YamlConfigDict], bool]:
    """Return a function that takes as input a dictionary read from
    a configuration file and update it to the latest format.

    The function returns True if there was any update, False otherwise.

    Args:
        section (str): section of the configuration e.g. "packages",
        section: section of the configuration e.g. "packages",
            "config", etc.
    """
    # The line below is based on the fact that every module we need
@@ -1348,7 +1443,9 @@ def ensure_latest_format_fn(section):
@contextlib.contextmanager
def use_configuration(*scopes_or_paths):
def use_configuration(
    *scopes_or_paths: Union[ConfigScope, str]
) -> Generator[Configuration, None, None]:
    """Use the configuration scopes passed as arguments within the
    context manager.

@@ -1372,8 +1469,8 @@ def use_configuration(*scopes_or_paths):
    CONFIG = saved_config


@llnl.util.lang.memoized
def _config_from(scopes_or_paths):
@lang.memoized
def _config_from(scopes_or_paths: List[Union[ConfigScope, str]]) -> Configuration:
    scopes = []
    for scope_or_path in scopes_or_paths:
        # If we have a config scope we are already done
@@ -1383,7 +1480,7 @@ def _config_from(scopes_or_paths):

        # Otherwise we need to construct it
        path = os.path.normpath(scope_or_path)
        assert os.path.isdir(path), '"{0}" must be a directory'.format(path)
        assert os.path.isdir(path), f'"{path}" must be a directory'
        name = os.path.basename(path)
        scopes.append(ConfigScope(name, path))

@@ -1391,13 +1488,14 @@ def _config_from(scopes_or_paths):
    return configuration
def raw_github_gitlab_url(url):
def raw_github_gitlab_url(url: str) -> str:
    """Transform a github URL to the raw form to avoid undesirable html.

    Args:
        url: url to be converted to raw form

    Returns: (str) raw github/gitlab url or the original url
    Returns:
        Raw github/gitlab url or the original url
    """
    # Note we rely on GitHub to redirect the 'raw' URL returned here to the
    # actual URL under https://raw.githubusercontent.com/ with '/blob'
@@ -1450,7 +1548,7 @@ def fetch_remote_configs(url: str, dest_dir: str, skip_existing: bool = True) ->

    def _fetch_file(url):
        raw = raw_github_gitlab_url(url)
        tty.debug("Reading config from url {0}".format(raw))
        tty.debug(f"Reading config from url {raw}")
        return web_util.fetch_url_text(raw, dest_dir=dest_dir)

    if not url:
@@ -1466,8 +1564,8 @@ def _fetch_file(url):
        basename = os.path.basename(config_url)
        if skip_existing and basename in existing_files:
            tty.warn(
                "Will not fetch configuration from {0} since a version already"
                "exists in {1}".format(config_url, dest_dir)
                f"Will not fetch configuration from {config_url} since a "
                f"version already exists in {dest_dir}"
            )
            path = os.path.join(dest_dir, basename)
        else:
@@ -1479,7 +1577,7 @@ def _fetch_file(url):
    if paths:
        return dest_dir if len(paths) > 1 else paths[0]

    raise ConfigFileError("Cannot retrieve configuration (yaml) from {0}".format(url))
    raise ConfigFileError(f"Cannot retrieve configuration (yaml) from {url}")
class ConfigError(SpackError):
@@ -1497,7 +1595,13 @@ class ConfigFileError(ConfigError):
class ConfigFormatError(ConfigError):
    """Raised when a configuration format does not match its schema."""

    def __init__(self, validation_error, data, filename=None, line=None):
    def __init__(
        self,
        validation_error,
        data: YamlConfigDict,
        filename: Optional[str] = None,
        line: Optional[int] = None,
    ) -> None:
        # spack yaml has its own file/line marks -- try to find them
        # we prioritize these over the inputs
        self.validation_error = validation_error
@@ -1511,11 +1615,11 @@ def __init__(self, validation_error, data, filename=None, line=None):
        # construct location
        location = "<unknown file>"
        if filename:
            location = "%s" % filename
            location = f"{filename}"
        if line is not None:
            location += ":%d" % line
            location += f":{line:d}"

        message = "%s: %s" % (location, validation_error.message)
        message = f"{location}: {validation_error.message}"
        super().__init__(message)

    def _get_mark(self, validation_error, data):
|
@@ -309,10 +309,14 @@ def find_windows_kit_roots() -> List[str]:
|
||||
return glob.glob(kit_base)
|
||||
|
||||
@staticmethod
|
||||
def find_windows_kit_bin_paths(kit_base: Optional[str] = None) -> List[str]:
|
||||
def find_windows_kit_bin_paths(
|
||||
kit_base: Union[Optional[str], Optional[list]] = None
|
||||
) -> List[str]:
|
||||
"""Returns Windows kit bin directory per version"""
|
||||
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
|
||||
assert kit_base, "Unexpectedly empty value for Windows kit base path"
|
||||
if isinstance(kit_base, str):
|
||||
kit_base = kit_base.split(";")
|
||||
kit_paths = []
|
||||
for kit in kit_base:
|
||||
kit_bin = os.path.join(kit, "bin")
|
||||
@@ -320,10 +324,14 @@ def find_windows_kit_bin_paths(kit_base: Optional[str] = None) -> List[str]:
|
||||
return kit_paths
|
||||
|
||||
@staticmethod
|
||||
def find_windows_kit_lib_paths(kit_base: Optional[str] = None) -> List[str]:
|
||||
def find_windows_kit_lib_paths(
|
||||
kit_base: Union[Optional[str], Optional[list]] = None
|
||||
) -> List[str]:
|
||||
"""Returns Windows kit lib directory per version"""
|
||||
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
|
||||
assert kit_base, "Unexpectedly empty value for Windows kit base path"
|
||||
if isinstance(kit_base, str):
|
||||
kit_base = kit_base.split(";")
|
||||
kit_paths = []
|
||||
for kit in kit_base:
|
||||
kit_lib = os.path.join(kit, "Lib")
|
||||
|
@@ -16,7 +16,7 @@
import urllib.parse
import urllib.request
import warnings
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union
from typing import Dict, Iterable, List, Optional, Set, Tuple, Union

import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -307,7 +307,7 @@ def create(


def create_in_dir(
    manifest_dir: Union[str, pathlib.Path],
    root: Union[str, pathlib.Path],
    init_file: Optional[Union[str, pathlib.Path]] = None,
    with_view: Optional[Union[str, pathlib.Path, bool]] = None,
    keep_relative: bool = False,
@@ -318,35 +318,72 @@ def create_in_dir(
    are considered manifest files.

    Args:
        manifest_dir: directory where to create the environment.
        root: directory where to create the environment.
        init_file: either a lockfile, a manifest file, or None
        with_view: whether a view should be maintained for the environment. If the value is a
            string, it specifies the path to the view
        keep_relative: if True, develop paths are copied verbatim into the new environment file,
            otherwise they are made absolute
    """
    initialize_environment_dir(manifest_dir, envfile=init_file)
    initialize_environment_dir(root, envfile=init_file)

    if with_view is None and keep_relative:
        return Environment(manifest_dir)
        return Environment(root)

    try:
        manifest = EnvironmentManifestFile(manifest_dir)
        manifest = EnvironmentManifestFile(root)

        if with_view is not None:
            manifest.set_default_view(with_view)

        if not keep_relative and init_file is not None and str(init_file).endswith(manifest_name):
            init_file = pathlib.Path(init_file)
            manifest.absolutify_dev_paths(init_file.parent)

        manifest.flush()

    except (spack.config.ConfigFormatError, SpackEnvironmentConfigError) as e:
        shutil.rmtree(manifest_dir)
        shutil.rmtree(root)
        raise e

    return Environment(manifest_dir)
    env = Environment(root)

    if init_file:
        init_file_dir = os.path.abspath(os.path.dirname(init_file))

        if not keep_relative:
            if env.path != init_file_dir:
                # If we are here, we are creating an environment based on a
                # spack.yaml file in another directory, and moreover we want
                # dev paths in this environment to refer to their original
                # locations.
                _rewrite_relative_dev_paths_on_relocation(env, init_file_dir)

    return env


def _rewrite_relative_dev_paths_on_relocation(env, init_file_dir):
    """When initializing the environment from a manifest file and we plan
    to store the environment in a different directory, we have to rewrite
    relative paths to absolute ones."""
    with env:
        dev_specs = spack.config.get("develop", default={}, scope=env.env_file_config_scope_name())
        if not dev_specs:
            return
        for name, entry in dev_specs.items():
            dev_path = entry["path"]
            expanded_path = os.path.normpath(os.path.join(init_file_dir, entry["path"]))

            # Skip if the expanded path is the same (e.g. when absolute)
            if dev_path == expanded_path:
                continue

            tty.debug("Expanding develop path for {0} to {1}".format(name, expanded_path))

            dev_specs[name]["path"] = expanded_path

        spack.config.set("develop", dev_specs, scope=env.env_file_config_scope_name())

        env._dev_specs = None
        # If we changed the environment's spack.yaml scope, that will not be reflected
        # in the manifest that we read
        env._re_read()
def environment_dir_from_name(name: str, exists_ok: bool = True) -> str:
|
||||
@@ -753,8 +790,6 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:

         #: Specs from "spack.yaml"
         self.spec_lists: Dict[str, SpecList] = {user_speclist_name: SpecList()}
-        #: Dev-build specs from "spack.yaml"
-        self.dev_specs: Dict[str, Any] = {}
         #: User specs from the last concretization
         self.concretized_user_specs: List[Spec] = []
         #: Roots associated with the last concretization, in order

@@ -765,6 +800,7 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
         self._repo = None
         #: Previously active environment
         self._previous_active = None
+        self._dev_specs = None

         with lk.ReadTransaction(self.txlock):
             self.manifest = EnvironmentManifestFile(manifest_dir)

@@ -858,19 +894,29 @@ def _construct_state_from_manifest(self, re_read=False):
         else:
             self.views = {}

-        # Retrieve dev-build packages:
-        self.dev_specs = copy.deepcopy(env_configuration.get("develop", {}))
-        for name, entry in self.dev_specs.items():
-            # spec must include a concrete version
-            assert Spec(entry["spec"]).versions.concrete_range_as_version
-            # default path is the spec name
-            if "path" not in entry:
-                self.dev_specs[name]["path"] = name
-
     @property
     def user_specs(self):
         return self.spec_lists[user_speclist_name]

+    @property
+    def dev_specs(self):
+        if not self._dev_specs:
+            self._dev_specs = self._read_dev_specs()
+        return self._dev_specs
+
+    def _read_dev_specs(self):
+        dev_specs = {}
+        dev_config = spack.config.get("develop", {})
+        for name, entry in dev_config.items():
+            local_entry = {"spec": str(entry["spec"])}
+            # default path is the spec name
+            if "path" not in entry:
+                local_entry["path"] = name
+            else:
+                local_entry["path"] = entry["path"]
+            dev_specs[name] = local_entry
+        return dev_specs
+
     def clear(self, re_read=False):
         """Clear the contents of the environment

@@ -883,7 +929,7 @@ def clear(self, re_read=False):
         self.spec_lists = collections.OrderedDict()
         self.spec_lists[user_speclist_name] = SpecList()

-        self.dev_specs = {}  # dev-build specs from yaml
+        self._dev_specs = {}
         self.concretized_user_specs = []  # user specs from last concretize
         self.concretized_order = []  # roots of last concretize, in order
         self.specs_by_hash = {}  # concretized specs by hash
@@ -1251,82 +1297,6 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
                 del self.concretized_order[i]
                 del self.specs_by_hash[dag_hash]

-    def develop(self, spec: Spec, path: str, clone: bool = False) -> bool:
-        """Add dev-build info for package
-
-        Args:
-            spec: Set constraints on development specs. Must include a
-                concrete version.
-            path: Path to find code for developer builds. Relative
-                paths will be resolved relative to the environment.
-            clone: Clone the package code to the path.
-                If clone is False Spack will assume the code is already present
-                at ``path``.
-
-        Return:
-            (bool): True iff the environment was changed.
-        """
-        spec = spec.copy()  # defensive copy since we access cached attributes
-
-        if not spec.versions.concrete:
-            raise SpackEnvironmentError("Cannot develop spec %s without a concrete version" % spec)
-
-        for name, entry in self.dev_specs.items():
-            if name == spec.name:
-                e_spec = Spec(entry["spec"])
-                e_path = entry["path"]
-
-                if e_spec == spec:
-                    if path == e_path:
-                        tty.msg("Spec %s already configured for development" % spec)
-                        return False
-                    else:
-                        tty.msg("Updating development path for spec %s" % spec)
-                        break
-                else:
-                    msg = "Updating development spec for package "
-                    msg += "%s with path %s" % (spec.name, path)
-                    tty.msg(msg)
-                    break
-        else:
-            tty.msg("Configuring spec %s for development at path %s" % (spec, path))
-
-        if clone:
-            # "steal" the source code via staging API. We ask for a stage
-            # to be created, then copy it afterwards somewhere else. It would be
-            # better if we can create the `source_path` directly into its final
-            # destination.
-            abspath = spack.util.path.canonicalize_path(path, default_wd=self.path)
-            pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
-            # We construct a package class ourselves, rather than asking for
-            # Spec.package, since Spec only allows this when it is concrete
-            package = pkg_cls(spec)
-            if isinstance(package.fetcher, spack.fetch_strategy.GitFetchStrategy):
-                package.fetcher.get_full_repo = True
-                # If we retrieved this version before and cached it, we may have
-                # done so without cloning the full git repo; likewise, any
-                # mirror might store an instance with truncated history.
-                package.stage.disable_mirrors()
-
-            package.stage.steal_source(abspath)
-
-        # If it wasn't already in the list, append it
-        entry = {"path": path, "spec": str(spec)}
-        self.dev_specs[spec.name] = entry
-        self.manifest.add_develop_spec(spec.name, entry=entry.copy())
-        return True
-
-    def undevelop(self, spec):
-        """Remove develop info for abstract spec ``spec``.
-
-        returns True on success, False if no entry existed."""
-        spec = Spec(spec)  # In case it's a spec object
-        if spec.name in self.dev_specs:
-            del self.dev_specs[spec.name]
-            self.manifest.remove_develop_spec(spec.name)
-            return True
-        return False
-
-    def is_develop(self, spec):
-        """Returns true when the spec is built from local sources"""
-        return spec.name in self.dev_specs
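Note: taken together, the hunks above move develop specs out of the manifest-backed Environment state and behind a lazily cached property that reads the `develop` config section. A minimal, self-contained sketch of that read-once caching pattern (`FakeConfig` is a hypothetical stand-in for `spack.config`, not Spack code):

class FakeConfig:
    """Hypothetical stand-in for spack.config in this sketch."""

    def __init__(self, data):
        self._data = data

    def get(self, section, default=None):
        return self._data.get(section, default)


config = FakeConfig({"develop": {"foo": {"spec": "foo@main"}}})


class Env:
    def __init__(self):
        self._dev_specs = None  # invalidated (set back to None) on re-read

    @property
    def dev_specs(self):
        # read once, then serve the cached copy until invalidated
        if not self._dev_specs:
            self._dev_specs = self._read_dev_specs()
        return self._dev_specs

    def _read_dev_specs(self):
        dev_specs = {}
        for name, entry in config.get("develop", {}).items():
            # default path is the spec name, mirroring the diff above
            dev_specs[name] = {"spec": str(entry["spec"]), "path": entry.get("path", name)}
        return dev_specs


env = Env()
print(env.dev_specs)  # {'foo': {'spec': 'foo@main', 'path': 'foo'}}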
@@ -2901,57 +2871,6 @@ def remove_default_view(self) -> None:

         self.set_default_view(view=False)

-    def add_develop_spec(self, pkg_name: str, entry: Dict[str, str]) -> None:
-        """Adds a develop spec to the manifest file
-
-        Args:
-            pkg_name: name of the package to be developed
-            entry: spec and path of the developed package
-        """
-        # The environment sets the path to pkg_name is that is implicit
-        if entry["path"] == pkg_name:
-            entry.pop("path")
-
-        self.pristine_configuration.setdefault("develop", {}).setdefault(pkg_name, {}).update(
-            entry
-        )
-        self.configuration.setdefault("develop", {}).setdefault(pkg_name, {}).update(entry)
-        self.changed = True
-
-    def remove_develop_spec(self, pkg_name: str) -> None:
-        """Removes a develop spec from the manifest file
-
-        Args:
-            pkg_name: package to be removed from development
-
-        Raises:
-            SpackEnvironmentError: if there is nothing to remove
-        """
-        try:
-            del self.pristine_configuration["develop"][pkg_name]
-        except KeyError as e:
-            msg = f"cannot remove '{pkg_name}' from develop specs in {self}, entry does not exist"
-            raise SpackEnvironmentError(msg) from e
-        del self.configuration["develop"][pkg_name]
-        self.changed = True
-
-    def absolutify_dev_paths(self, init_file_dir: Union[str, pathlib.Path]) -> None:
-        """Normalizes the dev paths in the environment with respect to the directory where the
-        initialization file resides.
-
-        Args:
-            init_file_dir: directory with the "spack.yaml" used to initialize the environment.
-        """
-        init_file_dir = pathlib.Path(init_file_dir).absolute()
-        for _, entry in self.pristine_configuration.get("develop", {}).items():
-            expanded_path = os.path.normpath(str(init_file_dir / entry["path"]))
-            entry["path"] = str(expanded_path)
-
-        for _, entry in self.configuration.get("develop", {}).items():
-            expanded_path = os.path.normpath(str(init_file_dir / entry["path"]))
-            entry["path"] = str(expanded_path)
-        self.changed = True
-
     def flush(self) -> None:
         """Synchronizes the object with the manifest file on disk."""
         if not self.changed:
@@ -6,11 +6,13 @@
 for Spack's command extensions.
 """
 import difflib
+import glob
 import importlib
 import os
 import re
 import sys
 import types
+from typing import List

 import llnl.util.lang

@@ -75,6 +77,15 @@ def load_command_extension(command, path):
     if not os.path.exists(cmd_path):
         return None

+    ensure_extension_loaded(extension, path=path)
+
+    module = importlib.import_module(module_name)
+    sys.modules[module_name] = module
+
+    return module
+
+
+def ensure_extension_loaded(extension, *, path):
     def ensure_package_creation(name):
         package_name = "{0}.{1}".format(__name__, name)
         if package_name in sys.modules:

@@ -100,10 +111,22 @@ def ensure_package_creation(name):
     ensure_package_creation(extension)
     ensure_package_creation(extension + ".cmd")

-    module = importlib.import_module(module_name)
-    sys.modules[module_name] = module
-
-    return module
+
+def load_extension(name: str) -> str:
+    """Loads a single extension into the 'spack.extensions' package.
+
+    Args:
+        name: name of the extension
+    """
+    extension_root = path_for_extension(name, paths=get_extension_paths())
+    ensure_extension_loaded(name, path=extension_root)
+    commands = glob.glob(
+        os.path.join(extension_root, extension_name(extension_root), "cmd", "*.py")
+    )
+    commands = [os.path.basename(x).rstrip(".py") for x in commands]
+    for command in commands:
+        load_command_extension(command, extension_root)
+    return extension_root


 def get_extension_paths():

@@ -125,7 +148,7 @@ def get_command_paths():
     return command_paths


-def path_for_extension(target_name, *paths):
+def path_for_extension(target_name: str, *, paths: List[str]) -> str:
     """Return the test root dir for a given extension.

     Args:
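Note: with the refactor above, `load_extension` becomes a single entry point that resolves the extension root, creates the `spack.extensions.<name>` packages, and imports every command module under the extension's `cmd` directory. A hedged usage sketch (the extension name `myext` is hypothetical, and this assumes an importable Spack checkout whose `config:extensions` points at the extension root):

import spack.extensions

# Loads the extension and all of its commands under <root>/<name>/cmd/*.py,
# returning the extension's root directory.
root = spack.extensions.load_extension("myext")  # "myext" is a made-up name
print(root)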
@@ -357,7 +357,8 @@ def _print_installed_pkg(message: str) -> None:
     Args:
         message (str): message to be output
     """
-    print(colorize("@*g{[+]} ") + spack.util.path.debug_padded_filter(message))
+    if tty.msg_enabled():
+        print(colorize("@*g{[+]} ") + spack.util.path.debug_padded_filter(message))


 def print_install_test_log(pkg: "spack.package_base.PackageBase") -> None:

@@ -380,7 +381,10 @@ def _print_timer(pre: str, pkg_id: str, timer: timer.BaseTimer) -> None:


 def _install_from_cache(
-    pkg: "spack.package_base.PackageBase", cache_only: bool, explicit: bool, unsigned: bool = False
+    pkg: "spack.package_base.PackageBase",
+    cache_only: bool,
+    explicit: bool,
+    unsigned: Optional[bool] = False,
 ) -> bool:
     """
     Extract the package from binary cache

@@ -390,8 +394,7 @@ def _install_from_cache(
         cache_only: only extract from binary cache
         explicit: ``True`` if installing the package was explicitly
             requested by the user, otherwise, ``False``
-        unsigned: ``True`` if binary package signatures to be checked,
-            otherwise, ``False``
+        unsigned: if ``True`` or ``False`` override the mirror signature verification defaults

     Return: ``True`` if the package was extract from binary cache, ``False`` otherwise
     """

@@ -461,7 +464,7 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b
 def _process_binary_cache_tarball(
     pkg: "spack.package_base.PackageBase",
     explicit: bool,
-    unsigned: bool,
+    unsigned: Optional[bool],
     mirrors_for_spec: Optional[list] = None,
     timer: timer.BaseTimer = timer.NULL_TIMER,
 ) -> bool:

@@ -471,8 +474,7 @@ def _process_binary_cache_tarball(
     Args:
         pkg: the package being installed
         explicit: the package was explicitly requested by the user
-        unsigned: ``True`` if binary package signatures to be checked,
-            otherwise, ``False``
+        unsigned: if ``True`` or ``False`` override the mirror signature verification defaults
         mirrors_for_spec: Optional list of concrete specs and mirrors
             obtained by calling binary_distribution.get_mirrors_for_spec().
         timer: timer to keep track of binary install phases.

@@ -492,9 +494,7 @@ def _process_binary_cache_tarball(
     tty.msg(f"Extracting {package_id(pkg)} from binary cache")

     with timer.measure("install"), spack.util.path.filter_padding():
-        binary_distribution.extract_tarball(
-            pkg.spec, download_result, unsigned=unsigned, force=False, timer=timer
-        )
+        binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)

         pkg.installed_from_binary_cache = True
         spack.store.STORE.db.add(pkg.spec, spack.store.STORE.layout, explicit=explicit)

@@ -504,7 +504,7 @@ def _process_binary_cache_tarball(
 def _try_install_from_binary_cache(
     pkg: "spack.package_base.PackageBase",
     explicit: bool,
-    unsigned: bool = False,
+    unsigned: Optional[bool] = None,
     timer: timer.BaseTimer = timer.NULL_TIMER,
 ) -> bool:
     """

@@ -513,8 +513,7 @@ def _try_install_from_binary_cache(
     Args:
         pkg: package to be extracted from binary cache
         explicit: the package was explicitly requested by the user
-        unsigned: ``True`` if binary package signatures to be checked,
-            otherwise, ``False``
+        unsigned: if ``True`` or ``False`` override the mirror signature verification defaults
         timer: timer to keep track of binary install phases.
     """
     # Early exit if no binary mirrors are configured.

@@ -824,7 +823,7 @@ def _add_default_args(self) -> None:
             ("restage", False),
             ("skip_patch", False),
             ("tests", False),
-            ("unsigned", False),
+            ("unsigned", None),
             ("verbose", False),
         ]:
             _ = self.install_args.setdefault(arg, default)

@@ -1662,7 +1661,7 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
         use_cache = task.use_cache
         tests = install_args.get("tests", False)
         assert isinstance(tests, (bool, list))  # make mypy happy.
-        unsigned = bool(install_args.get("unsigned"))
+        unsigned: Optional[bool] = install_args.get("unsigned")

         pkg, pkg_id = task.pkg, task.pkg_id

@@ -2007,7 +2006,9 @@ def install(self) -> None:

         # Only enable the terminal status line when we're in a tty without debug info
         # enabled, so that the output does not get cluttered.
-        term_status = TermStatusLine(enabled=sys.stdout.isatty() and not tty.is_debug())
+        term_status = TermStatusLine(
+            enabled=sys.stdout.isatty() and tty.msg_enabled() and not tty.is_debug()
+        )

         while self.build_pq:
             task = self._pop_task()
@@ -133,6 +133,10 @@ def binary(self):
     def source(self):
         return isinstance(self._data, str) or self._data.get("source", True)

+    @property
+    def signed(self) -> bool:
+        return isinstance(self._data, str) or self._data.get("signed", True)
+
     @property
     def fetch_url(self):
         """Get the valid, canonicalized fetch URL"""

@@ -146,7 +150,7 @@ def push_url(self):
     def _update_connection_dict(self, current_data: dict, new_data: dict, top_level: bool):
         keys = ["url", "access_pair", "access_token", "profile", "endpoint_url"]
         if top_level:
-            keys += ["binary", "source"]
+            keys += ["binary", "source", "signed"]
         changed = False
         for key in keys:
             if key in new_data and current_data.get(key) != new_data[key]:
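Note: the new `signed` property defaults to True unless a mirror explicitly sets `signed: false`, and the installer hunks earlier in this diff widen `unsigned` to `Optional[bool]` so that `None` can mean "defer to the mirror's own setting". A self-contained illustration of that tri-state resolution (this helper is illustrative only, not Spack's actual code):

from typing import Optional


def require_signature(cli_unsigned: Optional[bool], mirror_signed: bool) -> bool:
    """Illustration: None defers to the mirror; True/False override it."""
    if cli_unsigned is None:
        return mirror_signed
    return not cli_unsigned


# mirror configured with signed: false, no CLI override -> no verification
assert require_signature(None, mirror_signed=False) is False
# an explicit --unsigned wins over a signed mirror
assert require_signature(True, mirror_signed=True) is False
# an explicit opt-in to verification wins over an unsigned mirror
assert require_signature(False, mirror_signed=False) is True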
@@ -93,7 +93,7 @@ def _filter_compiler_wrappers_impl(pkg_or_builder):
     replacements = []

     for idx, (env_var, compiler_path) in enumerate(compiler_vars):
-        if env_var in os.environ:
+        if env_var in os.environ and compiler_path is not None:
             # filter spack wrapper and links to spack wrapper in case
             # build system runs realpath
             wrapper = os.environ[env_var]
@@ -5,11 +5,20 @@
 from ._operating_system import OperatingSystem
 from .cray_backend import CrayBackend
 from .cray_frontend import CrayFrontend
+from .freebsd import FreeBSDOs
 from .linux_distro import LinuxDistro
 from .mac_os import MacOs
 from .windows_os import WindowsOs

-__all__ = ["OperatingSystem", "LinuxDistro", "MacOs", "CrayFrontend", "CrayBackend", "WindowsOs"]
+__all__ = [
+    "OperatingSystem",
+    "LinuxDistro",
+    "MacOs",
+    "CrayFrontend",
+    "CrayBackend",
+    "WindowsOs",
+    "FreeBSDOs",
+]

 #: List of all the Operating Systems known to Spack
-operating_systems = [LinuxDistro, MacOs, CrayFrontend, CrayBackend, WindowsOs]
+operating_systems = [LinuxDistro, MacOs, CrayFrontend, CrayBackend, WindowsOs, FreeBSDOs]
lib/spack/spack/operating_systems/freebsd.py (new file, 15 lines)
@@ -0,0 +1,15 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import platform as py_platform
+
+from spack.version import Version
+
+from ._operating_system import OperatingSystem
+
+
+class FreeBSDOs(OperatingSystem):
+    def __init__(self):
+        release = py_platform.release().split("-", 1)[0]
+        super().__init__("freebsd", Version(release))
@@ -1035,15 +1035,26 @@ def _make_stage(self):
         # To fetch the current version
         source_stage = self._make_root_stage(self.fetcher)

-        # Extend it with all resources and patches
+        # all_stages is source + resources + patches
         all_stages = StageComposite()
         all_stages.append(source_stage)
         all_stages.extend(
             self._make_resource_stage(source_stage, r) for r in self._get_needed_resources()
         )
-        all_stages.extend(
-            p.stage for p in self.spec.patches if isinstance(p, spack.patch.UrlPatch)
-        )
+        if self.spec.concrete:
+            all_stages.extend(
+                p.stage for p in self.spec.patches if isinstance(p, spack.patch.UrlPatch)
+            )
+        else:
+            # The only code path that gets here is spack mirror create --all which just needs all
+            # matching patches.
+            all_stages.extend(
+                p.stage
+                for when_spec, patch_list in self.patches.items()
+                if self.spec.intersects(when_spec)
+                for p in patch_list
+                if isinstance(p, spack.patch.UrlPatch)
+            )
         return all_stages

     @property

@@ -1743,28 +1754,16 @@ def _if_ninja_target_execute(self, target, *args, **kwargs):
         inspect.getmodule(self).ninja(target, *args, **kwargs)

     def _get_needed_resources(self):
-        resources = []
-        # Select the resources that are needed for this build
-        if self.spec.concrete:
-            for when_spec, resource_list in self.resources.items():
-                if when_spec in self.spec:
-                    resources.extend(resource_list)
-        else:
-            for when_spec, resource_list in self.resources.items():
-                # Note that variant checking is always strict for specs where
-                # the name is not specified. But with strict variant checking,
-                # only variants mentioned in 'other' are checked. Here we only
-                # want to make sure that no constraints in when_spec
-                # conflict with the spec, so we need to invoke
-                # when_spec.satisfies(self.spec) vs.
-                # self.spec.satisfies(when_spec)
-                if when_spec.intersects(self.spec):
-                    resources.extend(resource_list)
-        # Sorts the resources by the length of the string representing their
-        # destination. Since any nested resource must contain another
-        # resource's name in its path, it seems that should work
-        resources = sorted(resources, key=lambda res: len(res.destination))
-        return resources
+        # We use intersects here cause it would also work if self.spec is abstract
+        resources = [
+            resource
+            for when_spec, resource_list in self.resources.items()
+            if self.spec.intersects(when_spec)
+            for resource in resource_list
+        ]
+        # Sorts the resources by the length of the string representing their destination. Since any
+        # nested resource must contain another resource's path, that should work
+        return sorted(resources, key=lambda res: len(res.destination))

     def _resource_stage(self, resource):
         pieces = ["resource", resource.name, self.spec.dag_hash()]
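Note: the rewritten `_get_needed_resources` collapses the concrete and abstract branches into a single `intersects` check, which is symmetric and therefore safe even when `self.spec` is abstract. A short sketch of the distinction the removed comment was making (assumes a Spack checkout on `sys.path`; `hdf5` is just an example package name):

from spack.spec import Spec

when_spec = Spec("+mpi")   # anonymous condition, as used in directives
abstract = Spec("hdf5")    # nothing pinned yet

# intersects is symmetric: "could these two constraints describe one spec?"
print(when_spec.intersects(abstract))  # True: compatible constraints

# satisfies is directional: "does the abstract spec guarantee +mpi?" It does not.
print(abstract.satisfies(when_spec))   # False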
@@ -58,6 +58,7 @@
 expansion when it is the first character in an id typed on the command line.
 """
 import enum
+import json
 import pathlib
 import re
 import sys

@@ -95,13 +96,55 @@
 else:
     FILENAME = WINDOWS_FILENAME

 #: These are legal values that *can* be parsed bare, without quotes on the command line.
 VALUE = r"(?:[a-zA-Z_0-9\-+\*.,:=\~\/\\]+)"
-QUOTED_VALUE = r"[\"']+(?:[a-zA-Z_0-9\-+\*.,:=\~\/\\\s]+)[\"']+"

+#: Variant/flag values that match this can be left unquoted in Spack output
+NO_QUOTES_NEEDED = re.compile(r"^[a-zA-Z0-9,/_.-]+$")
+
+#: Quoted values can be *anything* in between quotes, including escaped quotes.
+QUOTED_VALUE = r"(?:'(?:[^']|(?<=\\)')*'|\"(?:[^\"]|(?<=\\)\")*\")"

 VERSION = r"=?(?:[a-zA-Z0-9_][a-zA-Z_0-9\-\.]*\b)"
 VERSION_RANGE = rf"(?:(?:{VERSION})?:(?:{VERSION}(?!\s*=))?)"
 VERSION_LIST = rf"(?:{VERSION_RANGE}|{VERSION})(?:\s*,\s*(?:{VERSION_RANGE}|{VERSION}))*"

+#: Regex with groups to use for splitting (optionally propagated) key-value pairs
+SPLIT_KVP = re.compile(rf"^({NAME})(==?)(.*)$")
+
+#: Regex to strip quotes. Group 2 will be the unquoted string.
+STRIP_QUOTES = re.compile(r"^(['\"])(.*)\1$")
+
+
+def strip_quotes_and_unescape(string: str) -> str:
+    """Remove surrounding single or double quotes from string, if present."""
+    match = STRIP_QUOTES.match(string)
+    if not match:
+        return string
+
+    # replace any escaped quotes with bare quotes
+    quote, result = match.groups()
+    return result.replace(rf"\{quote}", quote)
+
+
+def quote_if_needed(value: str) -> str:
+    """Add quotes around the value if it requires quotes.
+
+    This will add quotes around the value unless it matches ``NO_QUOTES_NEEDED``.
+
+    This adds:
+    * single quotes by default
+    * double quotes around any value that contains single quotes
+
+    If double quotes are used, we json-escpae the string. That is, we escape ``\\``,
+    ``"``, and control codes.
+
+    """
+    if NO_QUOTES_NEEDED.match(value):
+        return value
+
+    return json.dumps(value) if "'" in value else f"'{value}'"
+

 class TokenBase(enum.Enum):
     """Base class for an enum type with a regex value"""
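Note: the two helpers above are designed to round-trip: `quote_if_needed` only quotes values that fall outside `NO_QUOTES_NEEDED`, and `strip_quotes_and_unescape` undoes it. A self-contained check using the exact definitions from this hunk:

import json
import re

NO_QUOTES_NEEDED = re.compile(r"^[a-zA-Z0-9,/_.-]+$")
STRIP_QUOTES = re.compile(r"^(['\"])(.*)\1$")


def strip_quotes_and_unescape(string: str) -> str:
    match = STRIP_QUOTES.match(string)
    if not match:
        return string
    quote, result = match.groups()
    return result.replace(rf"\{quote}", quote)


def quote_if_needed(value: str) -> str:
    if NO_QUOTES_NEEDED.match(value):
        return value
    return json.dumps(value) if "'" in value else f"'{value}'"


# bare value stays bare; spaces force single quotes; single quotes force double quotes
for value in ["O3", "-O3 -g", "it's fine"]:
    assert strip_quotes_and_unescape(quote_if_needed(value)) == value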
@@ -138,8 +181,8 @@ class TokenType(TokenBase):
     # Variants
     PROPAGATED_BOOL_VARIANT = rf"(?:(?:\+\+|~~|--)\s*{NAME})"
     BOOL_VARIANT = rf"(?:[~+-]\s*{NAME})"
-    PROPAGATED_KEY_VALUE_PAIR = rf"(?:{NAME}\s*==\s*(?:{VALUE}|{QUOTED_VALUE}))"
-    KEY_VALUE_PAIR = rf"(?:{NAME}\s*=\s*(?:{VALUE}|{QUOTED_VALUE}))"
+    PROPAGATED_KEY_VALUE_PAIR = rf"(?:{NAME}==(?:{VALUE}|{QUOTED_VALUE}))"
+    KEY_VALUE_PAIR = rf"(?:{NAME}=(?:{VALUE}|{QUOTED_VALUE}))"
     # Compilers
     COMPILER_AND_VERSION = rf"(?:%\s*(?:{NAME})(?:[\s]*)@\s*(?:{VERSION_LIST}))"
     COMPILER = rf"(?:%\s*(?:{NAME}))"

@@ -351,12 +394,14 @@ def parse(
         # accept another package name afterwards in a node
         if self.ctx.accept(TokenType.UNQUALIFIED_PACKAGE_NAME):
             initial_spec.name = self.ctx.current_token.value
+
         elif self.ctx.accept(TokenType.FULLY_QUALIFIED_PACKAGE_NAME):
             parts = self.ctx.current_token.value.split(".")
             name = parts[-1]
             namespace = ".".join(parts[:-1])
             initial_spec.name = name
             initial_spec.namespace = namespace
+
         elif self.ctx.accept(TokenType.FILENAME):
             return FileParser(self.ctx).parse(initial_spec)

@@ -370,6 +415,7 @@ def parse(
                 compiler_name = self.ctx.current_token.value[1:]
                 initial_spec.compiler = spack.spec.CompilerSpec(compiler_name.strip(), ":")
                 self.has_compiler = True
+
             elif self.ctx.accept(TokenType.COMPILER_AND_VERSION):
                 if self.has_compiler:
                     raise spack.spec.DuplicateCompilerSpecError(

@@ -381,6 +427,7 @@ def parse(
                     compiler_name.strip(), compiler_version
                 )
                 self.has_compiler = True
+
             elif (
                 self.ctx.accept(TokenType.VERSION_HASH_PAIR)
                 or self.ctx.accept(TokenType.GIT_VERSION)

@@ -395,31 +442,39 @@ def parse(
                 )
                 initial_spec.attach_git_version_lookup()
                 self.has_version = True
+
             elif self.ctx.accept(TokenType.BOOL_VARIANT):
                 variant_value = self.ctx.current_token.value[0] == "+"
                 initial_spec._add_flag(
                     self.ctx.current_token.value[1:].strip(), variant_value, propagate=False
                 )
+
             elif self.ctx.accept(TokenType.PROPAGATED_BOOL_VARIANT):
                 variant_value = self.ctx.current_token.value[0:2] == "++"
                 initial_spec._add_flag(
                     self.ctx.current_token.value[2:].strip(), variant_value, propagate=True
                 )
+
             elif self.ctx.accept(TokenType.KEY_VALUE_PAIR):
-                name, value = self.ctx.current_token.value.split("=", maxsplit=1)
-                name = name.strip("'\" ")
-                value = value.strip("'\" ")
-                initial_spec._add_flag(name, value, propagate=False)
+                match = SPLIT_KVP.match(self.ctx.current_token.value)
+                assert match, "SPLIT_KVP and KEY_VALUE_PAIR do not agree."
+
+                name, delim, value = match.groups()
+                initial_spec._add_flag(name, strip_quotes_and_unescape(value), propagate=False)

             elif self.ctx.accept(TokenType.PROPAGATED_KEY_VALUE_PAIR):
-                name, value = self.ctx.current_token.value.split("==", maxsplit=1)
-                name = name.strip("'\" ")
-                value = value.strip("'\" ")
-                initial_spec._add_flag(name, value, propagate=True)
+                match = SPLIT_KVP.match(self.ctx.current_token.value)
+                assert match, "SPLIT_KVP and PROPAGATED_KEY_VALUE_PAIR do not agree."
+
+                name, delim, value = match.groups()
+                initial_spec._add_flag(name, strip_quotes_and_unescape(value), propagate=True)

             elif self.ctx.expect(TokenType.DAG_HASH):
                 if initial_spec.abstract_hash:
                     break
                 self.ctx.accept(TokenType.DAG_HASH)
                 initial_spec.abstract_hash = self.ctx.current_token.value[1:]
+
             else:
                 break
@@ -8,6 +8,7 @@
 from ._platform import Platform
 from .cray import Cray
 from .darwin import Darwin
+from .freebsd import FreeBSD
 from .linux import Linux
 from .test import Test
 from .windows import Windows

@@ -17,6 +18,7 @@
     "Cray",
     "Darwin",
     "Linux",
+    "FreeBSD",
     "Test",
     "Windows",
     "platforms",

@@ -10,12 +10,13 @@

 from .cray import Cray
 from .darwin import Darwin
+from .freebsd import FreeBSD
 from .linux import Linux
 from .test import Test
 from .windows import Windows

 #: List of all the platform classes known to Spack
-platforms = [Cray, Darwin, Linux, Windows, Test]
+platforms = [Cray, Darwin, Linux, Windows, FreeBSD, Test]


 @llnl.util.lang.memoized
lib/spack/spack/platforms/freebsd.py (new file, 37 lines)
@@ -0,0 +1,37 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import platform
+
+import archspec.cpu
+
+import spack.target
+from spack.operating_systems.freebsd import FreeBSDOs
+
+from ._platform import Platform
+
+
+class FreeBSD(Platform):
+    priority = 102
+
+    def __init__(self):
+        super().__init__("freebsd")
+
+        for name in archspec.cpu.TARGETS:
+            self.add_target(name, spack.target.Target(name))
+
+        # Get specific default
+        self.default = archspec.cpu.host().name
+        self.front_end = self.default
+        self.back_end = self.default
+
+        os = FreeBSDOs()
+        self.default_os = str(os)
+        self.front_os = self.default_os
+        self.back_os = self.default_os
+        self.add_operating_system(str(os), os)
+
+    @classmethod
+    def detect(cls):
+        return platform.system().lower() == "freebsd"
@@ -32,13 +32,26 @@
 from .extract import extract_test_parts

 # Mapping Spack phases to the corresponding CTest/CDash phase.
+# TODO: Some of the phases being lumped into configure in the CDash tables
+# TODO: really belong in a separate column, such as "Setup".
+# TODO: Would also be nice to have `stage` as a separate phase that could
+# TODO: be lumped into that new column instead of configure, for example.
 MAP_PHASES_TO_CDASH = {
-    "autoreconf": "configure",
-    "cmake": "configure",
-    "configure": "configure",
-    "edit": "configure",
+    "autoreconf": "configure",  # AutotoolsBuilder
+    "bootstrap": "configure",  # CMakeBuilder
+    "build": "build",
+    "build_processes": "build",  # Openloops
+    "cmake": "configure",  # CMakeBuilder
+    "configure": "configure",
+    "edit": "configure",  # MakefileBuilder
+    "generate_luarocks_config": "configure",  # LuaBuilder
+    "hostconfig": "configure",  # Lvarray
+    "initconfig": "configure",  # CachedCMakeBuilder
+    "install": "build",
+    "meson": "configure",  # MesonBuilder
+    "preprocess": "configure",  # LuaBuilder
+    "qmake": "configure",  # QMakeBuilder
+    "unpack": "configure",  # LuaBuilder
 }

 # Initialize data structures common to each phase's report.

@@ -92,11 +105,12 @@ def __init__(self, configuration: CDashConfiguration):
         self.osname = platform.system()
         self.osrelease = platform.release()
         self.target = spack.platforms.host().target("default_target")
-        self.endtime = int(time.time())
+        self.starttime = int(time.time())
+        self.endtime = self.starttime
         self.buildstamp = (
             configuration.buildstamp
             if configuration.buildstamp
-            else build_stamp(configuration.track, self.endtime)
+            else build_stamp(configuration.track, self.starttime)
         )
         self.buildIds: Dict[str, str] = {}
         self.revision = ""

@@ -125,7 +139,7 @@ def build_report_for_package(self, report_dir, package, duration):
             report_data[phase] = {}
             report_data[phase]["loglines"] = []
             report_data[phase]["status"] = 0
-            report_data[phase]["endtime"] = self.endtime
+            report_data[phase]["starttime"] = self.starttime

         # Track the phases we perform so we know what reports to create.
         # We always report the update step because this is how we tell CDash

@@ -153,6 +167,25 @@ def build_report_for_package(self, report_dir, package, duration):
             elif cdash_phase:
                 report_data[cdash_phase]["loglines"].append(xml.sax.saxutils.escape(line))

+        # something went wrong pre-cdash "configure" phase b/c we have an exception and only
+        # "update" was encounterd.
+        # dump the report in the configure line so teams can see what the issue is
+        if len(phases_encountered) == 1 and package["exception"]:
+            # TODO this mapping is not ideal since these are pre-configure errors
+            # we need to determine if a more appropriate cdash phase can be utilized
+            # for now we will add a message to the log explaining this
+            cdash_phase = "configure"
+            phases_encountered.append(cdash_phase)
+
+            log_message = (
+                "Pre-configure errors occured in Spack's process that terminated the "
+                "build process prematurely.\nSpack output::\n{0}".format(
+                    xml.sax.saxutils.escape(package["exception"])
+                )
+            )
+
+            report_data[cdash_phase]["loglines"].append(log_message)
+
         # Move the build phase to the front of the list if it occurred.
         # This supports older versions of CDash that expect this phase
         # to be reported before all others.

@@ -160,9 +193,9 @@ def build_report_for_package(self, report_dir, package, duration):
             build_pos = phases_encountered.index("build")
             phases_encountered.insert(0, phases_encountered.pop(build_pos))

-        self.starttime = self.endtime - duration
+        self.endtime = self.starttime + duration
         for phase in phases_encountered:
             report_data[phase]["starttime"] = self.starttime
             report_data[phase]["endtime"] = self.endtime
             report_data[phase]["log"] = "\n".join(report_data[phase]["loglines"])
             errors, warnings = parse_log_events(report_data[phase]["loglines"])

@@ -309,7 +342,7 @@ def test_report_for_package(self, report_dir, package, duration):
             self.buildname = "{0}-{1}".format(self.current_package_name, package["id"])
         else:
             self.buildname = self.report_build_name(self.current_package_name)
-        self.starttime = self.endtime - duration
+        self.endtime = self.starttime + duration

         report_data = self.initialize_report(report_dir)
         report_data["hostname"] = socket.gethostname()

@@ -354,7 +387,7 @@ def concretization_report(self, report_dir, msg):
         self.buildname = self.base_buildname
         report_data = self.initialize_report(report_dir)
         report_data["update"] = {}
-        report_data["update"]["starttime"] = self.endtime
+        report_data["update"]["starttime"] = self.starttime
         report_data["update"]["endtime"] = self.endtime
         report_data["update"]["revision"] = self.revision
         report_data["update"]["log"] = msg
lib/spack/spack/schema/develop.py (new file, 34 lines)
@@ -0,0 +1,34 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+properties = {
+    "develop": {
+        "type": "object",
+        "default": {},
+        "additionalProperties": False,
+        "patternProperties": {
+            r"\w[\w-]*": {
+                "type": "object",
+                "additionalProperties": False,
+                "properties": {"spec": {"type": "string"}, "path": {"type": "string"}},
+            }
+        },
+    }
+}
+
+
+def update(data):
+    return False
+
+
+#: Full schema with metadata
+schema = {
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "title": "Spack repository configuration file schema",
+    "type": "object",
+    "additionalProperties": False,
+    "properties": properties,
+}
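Note: as a quick sanity check of the schema above, a `develop` entry carrying only `spec` and `path` validates, while unknown keys are rejected by `additionalProperties: False`. A sketch using the third-party `jsonschema` package (assumed installed) against this module's `schema` dict (requires a Spack checkout on `sys.path`):

import jsonschema  # third-party validator, assumed available

import spack.schema.develop as develop_schema

good = {"develop": {"mypkg": {"spec": "mypkg@main", "path": "/src/mypkg"}}}
jsonschema.validate(good, develop_schema.schema)  # passes silently

bad = {"develop": {"mypkg": {"version": "1.0"}}}  # "version" is not allowed
try:
    jsonschema.validate(bad, develop_schema.schema)
except jsonschema.ValidationError as e:
    print("rejected:", e.message)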
@@ -37,21 +37,6 @@
             # extra environment schema properties
             {
                 "include": {"type": "array", "default": [], "items": {"type": "string"}},
-                "develop": {
-                    "type": "object",
-                    "default": {},
-                    "additionalProperties": False,
-                    "patternProperties": {
-                        r"\w[\w-]*": {
-                            "type": "object",
-                            "additionalProperties": False,
-                            "properties": {
-                                "spec": {"type": "string"},
-                                "path": {"type": "string"},
-                            },
-                        }
-                    },
-                },
                 "specs": spack.schema.spec_list_schema,
                 "view": {
                     "anyOf": [
@@ -18,6 +18,7 @@
 import spack.schema.config
 import spack.schema.container
 import spack.schema.definitions
+import spack.schema.develop
 import spack.schema.mirrors
 import spack.schema.modules
 import spack.schema.packages

@@ -34,6 +35,7 @@
     spack.schema.container.properties,
     spack.schema.ci.properties,
     spack.schema.definitions.properties,
+    spack.schema.develop.properties,
     spack.schema.mirrors.properties,
     spack.schema.modules.properties,
     spack.schema.packages.properties,
@@ -42,6 +42,7 @@
     "properties": {
         "source": {"type": "boolean"},
         "binary": {"type": "boolean"},
+        "signed": {"type": "boolean"},
         "fetch": fetch_and_push,
         "push": fetch_and_push,
         **connection,  # type: ignore
@@ -7,6 +7,7 @@
 .. literalinclude:: _spack_root/lib/spack/spack/schema/packages.py
    :lines: 13-
 """
+import spack.schema.environment

 permissions = {
     "type": "object",

@@ -155,7 +156,13 @@
                         "spec": {"type": "string"},
                         "prefix": {"type": "string"},
                         "modules": {"type": "array", "items": {"type": "string"}},
-                        "extra_attributes": {"type": "object"},
+                        "extra_attributes": {
+                            "type": "object",
+                            "additionalProperties": True,
+                            "properties": {
+                                "environment": spack.schema.environment.definition
+                            },
+                        },
                     },
                     "additionalProperties": True,
                     "required": ["spec"],
@@ -11,13 +11,17 @@
+import pathlib
 import pprint
 import re
 import sys
 import types
 import warnings
 from typing import Callable, Dict, List, NamedTuple, Optional, Sequence, Set, Tuple, Union

 import archspec.cpu

+import spack.config as sc
 import spack.deptypes as dt
+import spack.paths as sp
+import spack.util.path as sup

 try:
     import clingo  # type: ignore[import]

@@ -27,6 +31,36 @@
 except ImportError:
     clingo = None  # type: ignore
     clingo_cffi = False
+except AttributeError:
+    # Reaching this point indicates a broken clingo installation
+    # If Spack derived clingo, suggest user re-run bootstrap
+    # if non-spack, suggest user investigate installation
+
+    # assume Spack is not responsibe for broken clingo
+    msg = (
+        f"Clingo installation at {clingo.__file__} is incomplete or invalid."
+        "Please repair installation or re-install. "
+        "Alternatively, consider installing clingo via Spack."
+    )
+    # check whether Spack is responsible
+    if (
+        pathlib.Path(
+            sup.canonicalize_path(sc.get("bootstrap:root", sp.default_user_bootstrap_path))
+        )
+        in pathlib.Path(clingo.__file__).parents
+    ):
+        # Spack is responsible for the broken clingo
+        msg = (
+            "Spack bootstrapped copy of Clingo is broken, "
+            "please re-run the bootstrapping process via command `spack bootstrap now`."
+            " If this issue persists, please file a bug at: github.com/spack/spack"
+        )
+    raise RuntimeError(
+        "Clingo installation may be broken or incomplete, "
+        "please verify clingo has been installed correctly"
+        "\n\nClingo does not provide symbol clingo.Symbol"
+        f"{msg}"
+    )

 import llnl.util.lang
 import llnl.util.tty as tty

@@ -61,6 +95,8 @@
 ASTType = None
 parse_files = None

+#: Enable the addition of a runtime node
+WITH_RUNTIME = sys.platform != "win32"
+
 #: Data class that contain configuration on what a
 #: clingo solve should output.

@@ -122,6 +158,8 @@ class Provenance(enum.IntEnum):
     PACKAGE_PY = enum.auto()
     # An installed spec
     INSTALLED = enum.auto()
+    # A runtime injected from another package (e.g. a compiler)
+    RUNTIME = enum.auto()

     def __str__(self):
         return f"{self._name_.lower()}"

@@ -2023,7 +2061,9 @@ class Body:
                     f.node_compiler_version(spec.name, spec.compiler.name, spec.compiler.version)
                 )

-            elif spec.compiler.versions:
+            elif spec.compiler.versions and spec.compiler.versions != vn.any_version:
+                # The condition above emits a facts only if we have an actual constraint
+                # on the compiler version, and avoids emitting them if any version is fine
                 clauses.append(
                     fn.attr(
                         "node_compiler_version_satisfies",

@@ -2578,6 +2618,9 @@ def setup(
         self.possible_virtuals = node_counter.possible_virtuals()
         self.pkgs = node_counter.possible_dependencies()

+        runtimes = spack.repo.PATH.packages_with_tags("runtime")
+        self.pkgs.update(set(runtimes))
+
         # Fail if we already know an unreachable node is requested
         for spec in specs:
             missing_deps = [

@@ -2678,6 +2721,10 @@ def setup(
         self.gen.h1("Variant Values defined in specs")
         self.define_variant_values()

+        if WITH_RUNTIME:
+            self.gen.h1("Runtimes")
+            self.define_runtime_constraints()
+
         self.gen.h1("Version Constraints")
         self.collect_virtual_constraints()
         self.define_version_constraints()

@@ -2688,6 +2735,21 @@ def setup(
         self.gen.h1("Target Constraints")
         self.define_target_constraints()

+    def define_runtime_constraints(self):
+        """Define the constraints to be imposed on the runtimes"""
+        recorder = RuntimePropertyRecorder(self)
+        for compiler in self.possible_compilers:
+            if compiler.name != "gcc":
+                continue
+            try:
+                compiler_cls = spack.repo.PATH.get_pkg_class(compiler.name)
+            except spack.repo.UnknownPackageError:
+                continue
+            if hasattr(compiler_cls, "runtime_constraints"):
+                compiler_cls.runtime_constraints(compiler=compiler, pkg=recorder)
+
+        recorder.consume_facts()
+
     def literal_specs(self, specs):
         for spec in specs:
             self.gen.h2("Spec: %s" % str(spec))

@@ -2697,7 +2759,7 @@ def literal_specs(self, specs):
             # Special condition triggered by "literal_solved"
             self.gen.fact(fn.literal(trigger_id))
             self.gen.fact(fn.pkg_fact(spec.name, fn.condition_trigger(condition_id, trigger_id)))
-            self.gen.fact(fn.condition_reason(condition_id, f"{spec} requested from CLI"))
+            self.gen.fact(fn.condition_reason(condition_id, f"{spec} requested explicitly"))

             imposed_spec_key = str(spec), None
             cache = self._effect_cache[spec.name]
@@ -2796,6 +2858,157 @@ def _specs_from_requires(self, pkg_name, section):
             yield _spec_with_default_name(s, pkg_name)


+class RuntimePropertyRecorder:
+    """An object of this class is injected in callbacks to compilers, to let them declare
+    properties of the runtimes they support and of the runtimes they provide, and to add
+    runtime dependencies to the nodes using said compiler.
+
+    The usage of the object is the following. First, a runtime package name or the wildcard
+    "*" are passed as an argument to __call__, to set which kind of package we are referring to.
+    Then we can call one method with a directive-like API.
+
+    Examples:
+        >>> pkg = RuntimePropertyRecorder(setup)
+        >>> # Every package compiled with %gcc has a link dependency on 'gcc-runtime'
+        >>> pkg("*").depends_on(
+        ...     "gcc-runtime",
+        ...     when="%gcc",
+        ...     type="link",
+        ...     description="If any package uses %gcc, it depends on gcc-runtime"
+        ... )
+        >>> # The version of gcc-runtime is the same as the %gcc used to "compile" it
+        >>> pkg("gcc-runtime").requires("@=9.4.0", when="%gcc@=9.4.0")
+    """
+
+    def __init__(self, setup):
+        self._setup = setup
+        self.rules = []
+        self.runtime_conditions = set()
+        # State of this object set in the __call__ method, and reset after
+        # each directive-like method
+        self.current_package = None
+
+    def __call__(self, package_name: str) -> "RuntimePropertyRecorder":
+        """Sets a package name for the next directive-like method call"""
+        assert self.current_package is None, f"state was already set to '{self.current_package}'"
+        self.current_package = package_name
+        return self
+
+    def reset(self):
+        """Resets the current state."""
+        self.current_package = None
+
+    def depends_on(self, dependency_str: str, *, when: str, type: str, description: str) -> None:
+        """Injects conditional dependencies on packages.
+
+        Args:
+            dependency_str: the dependency spec to inject
+            when: anonymous condition to be met on a package to have the dependency
+            type: dependency type
+            description: human-readable description of the rule for adding the dependency
+        """
+        # TODO: The API for this function is not final, and is still subject to change. At
+        # TODO: the moment, we implemented only the features strictly needed for the
+        # TODO: functionality currently provided by Spack, and we assert nothing else is required.
+        msg = "the 'depends_on' method can be called only with pkg('*')"
+        assert self.current_package == "*", msg
+
+        when_spec = spack.spec.Spec(when)
+        assert when_spec.name is None, "only anonymous when specs are accepted"
+
+        dependency_spec = spack.spec.Spec(dependency_str)
+        if dependency_spec.versions != vn.any_version:
+            self._setup.version_constraints.add((dependency_spec.name, dependency_spec.versions))
+
+        placeholder = "XXX"
+        node_variable = "node(ID, Package)"
+        when_spec.name = placeholder
+
+        body_clauses = self._setup.spec_clauses(when_spec, body=True)
+        body_str = (
+            f"  {f',{os.linesep}  '.join(str(x) for x in body_clauses)},\n"
+            f"  not runtime(Package)"
+        ).replace(f'"{placeholder}"', f"{node_variable}")
+        head_clauses = self._setup.spec_clauses(dependency_spec, body=False)
+
+        runtime_pkg = dependency_spec.name
+        main_rule = (
+            f"% {description}\n"
+            f'1 {{ attr("depends_on", {node_variable}, node(0..X-1, "{runtime_pkg}"), "{type}") :'
+            f' max_dupes("gcc-runtime", X)}} 1:-\n'
+            f"{body_str}.\n\n"
+        )
+        self.rules.append(main_rule)
+        for clause in head_clauses:
+            if clause.args[0] == "node":
+                continue
+            runtime_node = f'node(RuntimeID, "{runtime_pkg}")'
+            head_str = str(clause).replace(f'"{runtime_pkg}"', runtime_node)
+            rule = (
+                f"{head_str} :-\n"
+                f'  attr("depends_on", {node_variable}, {runtime_node}, "{type}"),\n'
+                f"{body_str}.\n\n"
+            )
+            self.rules.append(rule)
+
+        self.reset()
+
+    def requires(self, impose: str, *, when: str):
+        """Injects conditional requirements on a given package.
+
+        Args:
+            impose: constraint to be imposed
+            when: condition triggering the constraint
+        """
+        msg = "the 'requires' method cannot be called with pkg('*') or without setting the package"
+        assert self.current_package is not None and self.current_package != "*", msg
+
+        imposed_spec = spack.spec.Spec(f"{self.current_package}{impose}")
+        when_spec = spack.spec.Spec(f"{self.current_package}{when}")
+
+        assert imposed_spec.versions.concrete, f"{impose} must have a concrete version"
+        assert when_spec.compiler.concrete, f"{when} must have a concrete compiler"
+
+        # Add versions to possible versions
+        for s in (imposed_spec, when_spec):
+            if not s.versions.concrete:
+                continue
+            self._setup.possible_versions[s.name].add(s.version)
+            self._setup.declared_versions[s.name].append(
+                DeclaredVersion(version=s.version, idx=0, origin=Provenance.RUNTIME)
+            )
+
+        self.runtime_conditions.add((imposed_spec, when_spec))
+        self.reset()
+
+    def consume_facts(self):
+        """Consume the facts collected by this object, and emits rules and
+        facts for the runtimes.
+        """
+        self._setup.gen.h2("Runtimes: rules")
+        self._setup.gen.newline()
+        for rule in self.rules:
+            if not isinstance(self._setup.gen.out, llnl.util.lang.Devnull):
+                self._setup.gen.out.write(rule)
+            self._setup.gen.control.add("base", [], rule)
+
+        self._setup.gen.h2("Runtimes: conditions")
+        for runtime_pkg in spack.repo.PATH.packages_with_tags("runtime"):
+            self._setup.gen.fact(fn.runtime(runtime_pkg))
+            self._setup.gen.fact(fn.possible_in_link_run(runtime_pkg))
+            self._setup.gen.newline()
+            # Inject version rules for runtimes (versions are declared based
+            # on the available compilers)
+            self._setup.pkg_version_rules(runtime_pkg)
+
+        for imposed_spec, when_spec in self.runtime_conditions:
+            msg = f"{when_spec} requires {imposed_spec} at runtime"
+            _ = self._setup.condition(when_spec, imposed_spec=imposed_spec, msg=msg)
+
+        self._setup.trigger_rules()
+        self._setup.effect_rules()
+
+
 class SpecBuilder:
     """Class with actions to rebuild a spec from ASP results."""
@@ -3134,14 +3347,38 @@ def _develop_specs_from_env(spec, env):
         spec.constrain(dev_info["spec"])


-def _is_reusable_external(packages, spec: spack.spec.Spec) -> bool:
-    """Returns true iff spec is an external that can be reused.
+def _is_reusable(spec: spack.spec.Spec, packages, local: bool) -> bool:
+    """A spec is reusable if it's not a dev spec, it's imported from the cray manifest, it's not
+    external, or it's external with matching packages.yaml entry. The latter prevents two issues:
+
+        1. Externals in build caches: avoid installing an external on the build machine not
+           available on the target machine
+        2. Local externals: avoid reusing an external if the local config changes. This helps in
+           particular when a user removes an external from packages.yaml, and expects that that
+           takes effect immediately.

     Arguments:
-        packages: the packages configuration
         spec: the spec to check
+        packages: the packages configuration
     """
-    for name in {spec.name, *(p.name for p in spec.package.provided)}:
+    if "dev_path" in spec.variants:
+        return False
+
+    if not spec.external:
+        return True
+
+    # Cray external manifest externals are always reusable
+    if local:
+        _, record = spack.store.STORE.db.query_by_spec_hash(spec.dag_hash())
+        if record and record.origin == "external-db":
+            return True
+
+    try:
+        provided = [p.name for p in spec.package.provided]
+    except spack.repo.RepoError:
+        provided = []
+
+    for name in {spec.name, *provided}:
         for entry in packages.get(name, {}).get("externals", []):
             if (
                 spec.satisfies(entry["spec"])

@@ -3188,29 +3425,21 @@ def _check_input_and_extract_concrete_specs(specs):
     def _reusable_specs(self, specs):
         reusable_specs = []
         if self.reuse:
+            packages = spack.config.get("packages")
             # Specs from the local Database
             with spack.store.STORE.db.read_transaction():
                 reusable_specs.extend(
-                    [
-                        s
-                        for s in spack.store.STORE.db.query(installed=True)
-                        if not s.satisfies("dev_path=*")
-                    ]
+                    s
+                    for s in spack.store.STORE.db.query(installed=True)
+                    if _is_reusable(s, packages, local=True)
                 )

             # Specs from buildcaches
             try:
-                # Specs in a build cache that depend on externals are reusable as long as local
-                # config has matching externals. This should guard against picking up binaries
-                # linked against externals not available locally, while still supporting the use
-                # case of distributing binaries across machines with similar externals.
-                packages = spack.config.get("packages")
                 reusable_specs.extend(
-                    [
-                        s
-                        for s in spack.binary_distribution.update_cache_and_get_specs()
-                        if not s.external or _is_reusable_external(packages, s)
-                    ]
+                    s
+                    for s in spack.binary_distribution.update_cache_and_get_specs()
+                    if _is_reusable(s, packages, local=False)
                 )
             except (spack.binary_distribution.FetchCacheError, IndexError):
                 # this is raised when no mirrors had indices.
@@ -910,19 +910,23 @@ def flags():
             yield flags

     def __str__(self):
-        sorted_keys = [k for k in sorted(self.keys()) if self[k] != []]
-        cond_symbol = " " if len(sorted_keys) > 0 else ""
-        return (
-            cond_symbol
-            + " ".join(
-                key
-                + ('=="' if True in [f.propagate for f in self[key]] else '="')
-                + " ".join(self[key])
-                + '"'
-                for key in sorted_keys
-            )
-            + cond_symbol
-        )
+        sorted_items = sorted((k, v) for k, v in self.items() if v)
+
+        result = ""
+        for flag_type, flags in sorted_items:
+            normal = [f for f in flags if not f.propagate]
+            if normal:
+                result += f" {flag_type}={spack.parser.quote_if_needed(' '.join(normal))}"
+
+            propagated = [f for f in flags if f.propagate]
+            if propagated:
+                result += f" {flag_type}=={spack.parser.quote_if_needed(' '.join(propagated))}"
+
+        # TODO: somehow add this space only if something follows in Spec.format()
+        if sorted_items:
+            result += " "
+
+        return result


 def _sort_by_dep_types(dspec: DependencySpec):

@@ -3128,9 +3132,7 @@ def flat_dependencies(self, **kwargs):

         except spack.error.UnsatisfiableSpecError as e:
             # Here, the DAG contains two instances of the same package
-            # with inconsistent constraints. Users cannot produce
-            # inconsistent specs like this on the command line: the
-            # parser doesn't allow it. Spack must be broken!
+            # with inconsistent constraints.
             raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message) from e

     def index(self, deptype="all"):
@@ -4726,6 +4728,20 @@ def build_spec(self):
     def build_spec(self, value):
         self._build_spec = value

+    def trim(self, dep_name):
+        """
+        Remove any package that is or provides `dep_name` transitively
+        from this tree. This can also remove other dependencies if
+        they are only present because of `dep_name`.
+        """
+        for spec in list(self.traverse()):
+            new_dependencies = _EdgeMap()  # A new _EdgeMap
+            for pkg_name, edge_list in spec._dependencies.items():
+                for edge in edge_list:
+                    if (dep_name not in edge.virtuals) and (not dep_name == edge.spec.name):
+                        new_dependencies.add(edge)
+            spec._dependencies = new_dependencies
+
     def splice(self, other, transitive):
         """Splices dependency "other" into this ("target") Spec, and return the
         result as a concrete Spec.
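Note: a brief, hedged illustration of the new `Spec.trim` method (assumes a working Spack installation; `hdf5+mpi` is just an example spec whose graph contains an MPI provider):

import spack.spec

spec = spack.spec.Spec("hdf5+mpi").concretized()
print([e.spec.name for e in spec.edges_to_dependencies()])

# Drop the edge that is (or provides) "mpi", plus anything reachable only via it.
spec.trim("mpi")
print([e.spec.name for e in spec.edges_to_dependencies()])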
@@ -30,6 +30,8 @@ def current_host_platform():
         current_platform = spack.platforms.Darwin()
     elif "Windows" in platform.system():
         current_platform = spack.platforms.Windows()
+    elif "FreeBSD" in platform.system():
+        current_platform = spack.platforms.FreeBSD()
     return current_platform
@@ -285,6 +285,24 @@ def platform_pathsep(pathlist):
    assert name not in os.environ


def test_external_config_env(mock_packages, mutable_config, working_env):
    cmake_config = {
        "externals": [
            {
                "spec": "cmake@1.0",
                "prefix": "/fake/path",
                "extra_attributes": {"environment": {"set": {"TEST_ENV_VAR_SET": "yes it's set"}}},
            }
        ]
    }
    spack.config.set("packages:cmake", cmake_config)

    cmake_client = spack.spec.Spec("cmake-client").concretized()
    spack.build_environment.setup_package(cmake_client.package, False)

    assert os.environ["TEST_ENV_VAR_SET"] == "yes it's set"


@pytest.mark.regression("9107")
def test_spack_paths_before_module_paths(config, mock_packages, monkeypatch, working_env):
    s = spack.spec.Spec("cmake")
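The `test_external_config_env` test above relies on an external carrying an `extra_attributes.environment` block that is applied when the package's build environment is set up. A minimal sketch of how such a block could be translated into process environment changes (the helper below is hypothetical, not Spack's implementation):

import os

def apply_external_environment(extra_attributes: dict) -> None:
    """Hypothetical helper: apply the 'set' directives of an external's
    extra_attributes.environment block to the current process."""
    env_block = extra_attributes.get("environment", {})
    for name, value in env_block.get("set", {}).items():
        os.environ[name] = str(value)

external = {
    "spec": "cmake@1.0",
    "prefix": "/fake/path",
    "extra_attributes": {"environment": {"set": {"TEST_ENV_VAR_SET": "yes it's set"}}},
}
apply_external_environment(external["extra_attributes"])
assert os.environ["TEST_ENV_VAR_SET"] == "yes it's set"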
@@ -25,7 +25,7 @@ def test_error_when_multiple_specs_are_given():
    assert "only takes one spec" in output


@pytest.mark.parametrize("args", [("--", "/bin/bash", "-c", "echo test"), ("--",), ()])
@pytest.mark.parametrize("args", [("--", "/bin/sh", "-c", "echo test"), ("--",), ()])
@pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_build_env_requires_a_spec(args):
    output = build_env(*args, fail_on_error=False)
@@ -35,7 +35,7 @@ def test_build_env_requires_a_spec(args):
    _out_file = "env.out"


@pytest.mark.parametrize("shell", ["pwsh", "bat"] if sys.platform == "win32" else ["bash"])
@pytest.mark.parametrize("shell", ["pwsh", "bat"] if sys.platform == "win32" else ["sh"])
@pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_dump(shell_as, shell, tmpdir):
    with tmpdir.as_cwd():
@@ -331,3 +331,37 @@ def fake_push(node, push_url, options):

    # Ensure no duplicates
    assert len(set(packages_to_push)) == len(packages_to_push)


@pytest.mark.parametrize("signed", [True, False])
def test_push_and_install_with_mirror_marked_unsigned_does_not_require_extra_flags(
    tmp_path, mutable_database, mock_gnupghome, signed
):
    """Tests whether marking a mirror as unsigned makes it possible to push and install to/from
    it without requiring extra flags on the command line (and no signing keys configured)."""

    # Create a named mirror with signed set to True or False
    add_flag = "--signed" if signed else "--unsigned"
    mirror("add", add_flag, "my-mirror", str(tmp_path))
    spec = mutable_database.query_local("libelf", installed=True)[0]

    # Push
    if signed:
        # Need to pass "--unsigned" to override the mirror's default
        args = ["push", "--update-index", "--unsigned", "my-mirror", f"/{spec.dag_hash()}"]
    else:
        # No need to pass "--unsigned" if the mirror is unsigned
        args = ["push", "--update-index", "my-mirror", f"/{spec.dag_hash()}"]

    buildcache(*args)

    # Install
    if signed:
        # Need to pass "--no-check-signature" to avoid install errors
        kwargs = {"cache_only": True, "unsigned": True}
    else:
        # No need to pass "--no-check-signature" if the mirror is unsigned
        kwargs = {"cache_only": True}

    spec.package.do_uninstall(force=True)
    spec.package.do_install(**kwargs)
@@ -2000,7 +2000,7 @@ def test_ci_reproduce(

    install_script = os.path.join(working_dir.strpath, "install.sh")
    with open(install_script, "w") as fd:
        fd.write("#!/bin/bash\n\n#fake install\nspack install blah\n")
        fd.write("#!/bin/sh\n\n#fake install\nspack install blah\n")

    spack_info_file = os.path.join(working_dir.strpath, "spack_info.txt")
    with open(spack_info_file, "w") as fd:
@@ -50,8 +50,8 @@ def test_negative_integers_not_allowed_for_parallel_jobs(job_parser):
    [
        (['coreutils cflags="-O3 -g"'], ["-O3", "-g"], [False, False], []),
        (['coreutils cflags=="-O3 -g"'], ["-O3", "-g"], [True, True], []),
        (["coreutils", "cflags=-O3 -g"], ["-O3"], [False], ["g"]),
        (["coreutils", "cflags==-O3 -g"], ["-O3"], [True], ["g"]),
        (["coreutils", "cflags=-O3 -g"], ["-O3", "-g"], [False, False], []),
        (["coreutils", "cflags==-O3 -g"], ["-O3", "-g"], [True, True], []),
        (["coreutils", "cflags=-O3", "-g"], ["-O3"], [False], ["g"]),
    ],
)
@@ -91,15 +91,10 @@ def test_config_edit(mutable_config, working_env):


def test_config_get_gets_spack_yaml(mutable_mock_env_path):
    config("get", fail_on_error=False)
    assert config.returncode == 1

    with ev.create("test") as env:
        assert "mpileaks" not in config("get")

        env.add("mpileaks")
        env.write()

        assert "mpileaks" in config("get")


@@ -122,11 +117,6 @@ def test_config_edit_fails_correctly_with_no_env(mutable_mock_env_path):
    assert "requires a section argument or an active environment" in output


def test_config_get_fails_correctly_with_no_env(mutable_mock_env_path):
    output = config("get", fail_on_error=False)
    assert "requires a section argument or an active environment" in output


def test_config_list():
    output = config("list")
    assert "compilers" in output
@@ -470,7 +460,6 @@ def test_config_add_to_env(mutable_empty_config, mutable_mock_env_path):

    expected = """ config:
    dirty: true

"""
    assert expected in output

@@ -497,29 +486,21 @@ def test_config_add_to_env_preserve_comments(mutable_empty_config, mutable_mock_
        config("add", "config:dirty:true")
        output = config("get")

    expected = manifest
    expected += """ config:
    dirty: true

"""
    assert output == expected
    assert "# comment" in output
    assert "dirty: true" in output


def test_config_remove_from_env(mutable_empty_config, mutable_mock_env_path):
    env("create", "test")

    with ev.read("test"):
        config("add", "config:dirty:true")
        output = config("get")
    assert "dirty: true" in output

    with ev.read("test"):
        config("rm", "config:dirty")
        output = config("get")

    expected = ev.default_manifest_yaml()
    expected += """ config: {}

"""
    assert output == expected
    assert "dirty: true" not in output


def test_config_update_config(config_yaml_v015):
@@ -19,7 +19,7 @@
pytestmark = pytest.mark.not_on_windows("does not run on windows")


@pytest.mark.usefixtures("mutable_mock_env_path", "mock_packages", "mock_fetch", "config")
@pytest.mark.usefixtures("mutable_mock_env_path", "mock_packages", "mock_fetch", "mutable_config")
class TestDevelop:
    def check_develop(self, env, spec, path=None):
        path = path or spec.name
@@ -31,9 +31,9 @@ def check_develop(self, env, spec, path=None):
        assert dev_specs_entry["spec"] == str(spec)

        # check yaml representation
        yaml = env.manifest[ev.TOP_LEVEL_KEY]
        assert spec.name in yaml["develop"]
        yaml_entry = yaml["develop"][spec.name]
        dev_config = spack.config.get("develop", {})
        assert spec.name in dev_config
        yaml_entry = dev_config[spec.name]
        assert yaml_entry["spec"] == str(spec)
        if path == spec.name:
            # default paths aren't written out
@@ -102,7 +102,7 @@ def test_develop_update_spec(self):
        self.check_develop(e, spack.spec.Spec("mpich@=2.0"))
        assert len(e.dev_specs) == 1

    def test_develop_canonicalize_path(self, monkeypatch, config):
    def test_develop_canonicalize_path(self, monkeypatch):
        env("create", "test")
        with ev.read("test") as e:
            path = "../$user"
@@ -119,7 +119,7 @@ def check_path(stage, dest):
        # Check modifications actually worked
        assert spack.spec.Spec("mpich@1.0").concretized().satisfies("dev_path=%s" % abspath)

    def test_develop_canonicalize_path_no_args(self, monkeypatch, config):
    def test_develop_canonicalize_path_no_args(self, monkeypatch):
        env("create", "test")
        with ev.read("test") as e:
            path = "$user"
@@ -10,12 +10,150 @@
import spack.main
import spack.store
import spack.util.spack_json as sjson
from spack.test.conftest import create_test_repo

install_cmd = spack.main.SpackCommand("install")
diff_cmd = spack.main.SpackCommand("diff")
find_cmd = spack.main.SpackCommand("find")


_p1 = (
    "p1",
    """\
class P1(Package):
    version("1.0")

    variant("p1var", default=True)
    variant("usev1", default=True)

    depends_on("p2")
    depends_on("v1", when="+usev1")
""",
)


_p2 = (
    "p2",
    """\
class P2(Package):
    version("1.0")

    variant("p2var", default=True)

    depends_on("p3")
""",
)


_p3 = (
    "p3",
    """\
class P3(Package):
    version("1.0")

    variant("p3var", default=True)
""",
)

_i1 = (
    "i1",
    """\
class I1(Package):
    version("1.0")

    provides("v1")

    variant("i1var", default=True)

    depends_on("p3")
    depends_on("p4")
""",
)

_i2 = (
    "i2",
    """\
class I2(Package):
    version("1.0")

    provides("v1")

    variant("i2var", default=True)

    depends_on("p3")
    depends_on("p4")
""",
)


_p4 = (
    "p4",
    """\
class P4(Package):
    version("1.0")

    variant("p4var", default=True)
""",
)


# Note that the hash of p1 will differ depending on the variant chosen
# we probably always want to omit that from diffs
@pytest.fixture
def _create_test_repo(tmpdir, mutable_config):
    """
    p1____
    |     \
    p2    v1
    |  ____/ |
    p3       p4

    i1 and i2 provide v1 (and both have the same dependencies)

    All packages have an associated variant
    """
    yield create_test_repo(tmpdir, [_p1, _p2, _p3, _i1, _i2, _p4])


@pytest.fixture
def test_repo(_create_test_repo, monkeypatch, mock_stage):
    with spack.repo.use_repositories(_create_test_repo) as mock_repo_path:
        yield mock_repo_path


def test_diff_ignore(test_repo):
    specA = spack.spec.Spec("p1+usev1").concretized()
    specB = spack.spec.Spec("p1~usev1").concretized()

    c1 = spack.cmd.diff.compare_specs(specA, specB, to_string=False)

    def match(function, name, args):
        limit = len(args)
        return function.name == name and list(args[:limit]) == list(function.args[:limit])

    def find(function_list, name, args):
        return any(match(f, name, args) for f in function_list)

    assert find(c1["a_not_b"], "node_os", ["p4"])

    c2 = spack.cmd.diff.compare_specs(specA, specB, ignore_packages=["v1"], to_string=False)

    assert not find(c2["a_not_b"], "node_os", ["p4"])
    assert find(c2["intersect"], "node_os", ["p3"])

    # Check ignoring changes on multiple packages

    specA = spack.spec.Spec("p1+usev1 ^p3+p3var").concretized()
    specA = spack.spec.Spec("p1~usev1 ^p3~p3var").concretized()

    c3 = spack.cmd.diff.compare_specs(specA, specB, to_string=False)
    assert find(c3["a_not_b"], "variant_value", ["p3", "p3var"])

    c4 = spack.cmd.diff.compare_specs(specA, specB, ignore_packages=["v1", "p3"], to_string=False)
    assert not find(c4["a_not_b"], "node_os", ["p4"])
    assert not find(c4["a_not_b"], "variant_value", ["p3"])


def test_diff_cmd(install_mockery, mock_fetch, mock_archive, mock_packages):
    """Test that we can install two packages and diff them"""
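The `ignore_packages` argument exercised above filters diff facts whose subject is an ignored package; note that in the tests, ignoring the virtual `v1` also drops facts about packages only reachable through its provider, so the real filtering is transitive. A standalone sketch of the direct case over `(name, args)` fact records (the record shape is an assumption):

from typing import List, Set, Tuple

Fact = Tuple[str, Tuple[str, ...]]  # e.g. ("node_os", ("p4", "debian6"))

def filter_ignored(facts: List[Fact], ignored: Set[str]) -> List[Fact]:
    """Drop facts whose first argument names an ignored package."""
    return [f for f in facts if not (f[1] and f[1][0] in ignored)]

facts = [("node_os", ("p4", "debian6")), ("variant_value", ("p3", "p3var", "True"))]
print(filter_ignored(facts, {"p3"}))  # [('node_os', ('p4', 'debian6'))]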
@@ -53,6 +53,7 @@
stage = SpackCommand("stage")
uninstall = SpackCommand("uninstall")
find = SpackCommand("find")
develop = SpackCommand("develop")
module = SpackCommand("module")

sep = os.sep
@@ -719,10 +720,10 @@ def test_env_with_config(environment_from_manifest):
    assert any(x.intersects("mpileaks@2.2") for x in e._get_environment_specs())


def test_with_config_bad_include(environment_from_manifest):
def test_with_config_bad_include_create(environment_from_manifest):
    """Confirm missing include paths raise expected exception and error."""
    with pytest.raises(spack.config.ConfigFileError, match="2 missing include path"):
        e = environment_from_manifest(
        environment_from_manifest(
            """
spack:
  include:
@@ -730,9 +731,42 @@ def test_with_config_bad_include(environment_from_manifest):
  - no/such/file.yaml
"""
        )
    with e:
        e.concretize()


def test_with_config_bad_include_activate(environment_from_manifest, tmpdir):
    env_root = pathlib.Path(tmpdir.ensure("env-root", dir=True))
    include1 = env_root / "include1.yaml"
    include1.touch()

    abs_include_path = os.path.abspath(tmpdir.join("subdir").ensure("include2.yaml"))

    spack_yaml = env_root / ev.manifest_name
    spack_yaml.write_text(
        f"""
spack:
  include:
  - ./include1.yaml
  - {abs_include_path}
"""
    )

    e = ev.Environment(env_root)
    with e:
        e.concretize()

    # we've created an environment with some included config files (which do
    # in fact exist): now we remove them and check that we get a sensible
    # error message

    os.remove(abs_include_path)
    os.remove(include1)
    with pytest.raises(spack.config.ConfigFileError) as exc:
        ev.activate(e)

    err = exc.value.message
    assert "missing include" in err
    assert abs_include_path in err
    assert "include1.yaml" in err
    assert ev.active_environment() is None


@@ -1173,7 +1207,7 @@ def test_env_blocks_uninstall(mock_stage, mock_fetch, install_mockery):
        add("mpileaks")
        install("--fake")

    out = uninstall("mpileaks", fail_on_error=False)
    out = uninstall("-y", "mpileaks", fail_on_error=False)
    assert uninstall.returncode == 1
    assert "The following environments still reference these specs" in out

@@ -2902,13 +2936,15 @@ def test_virtual_spec_concretize_together(tmpdir):
    assert any(s.package.provides("mpi") for _, s in e.concretized_specs())


def test_query_develop_specs():
def test_query_develop_specs(tmpdir):
    """Test whether a spec is develop'ed or not"""
    srcdir = tmpdir.ensure("here")

    env("create", "test")
    with ev.read("test") as e:
        e.add("mpich")
        e.add("mpileaks")
        e.develop(Spec("mpich@=1"), "here", clone=False)
        develop("--no-clone", "-p", str(srcdir), "mpich@=1")

        assert e.is_develop(Spec("mpich"))
        assert not e.is_develop(Spec("mpileaks"))
@@ -398,3 +398,12 @@ def test_mirror_set_2(mutable_config):
        "url": "http://example.com",
        "push": {"url": "http://example2.com", "access_pair": ["username", "password"]},
    }


def test_mirror_add_set_signed(mutable_config):
    mirror("add", "--signed", "example", "http://example.com")
    assert spack.config.get("mirrors:example") == {"url": "http://example.com", "signed": True}
    mirror("set", "--unsigned", "example")
    assert spack.config.get("mirrors:example") == {"url": "http://example.com", "signed": False}
    mirror("set", "--signed", "example")
    assert spack.config.get("mirrors:example") == {"url": "http://example.com", "signed": True}
@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import re
from textwrap import dedent

import pytest

@@ -74,41 +73,6 @@ def test_spec_parse_cflags_quoting():
    assert ["-flto", "-Os"] == gh_flagged.compiler_flags["cxxflags"]


def test_spec_parse_unquoted_flags_report():
    """Verify that a useful error message is produced if unquoted compiler flags are
    provided."""
    # This should fail during parsing, since /usr/include is interpreted as a spec hash.
    with pytest.raises(spack.error.SpackError) as cm:
        # We don't try to figure out how many following args were intended to be part of
        # cflags, we just explain how to fix it for the immediate next arg.
        spec("gcc cflags=-Os -pipe -other-arg-that-gets-ignored cflags=-I /usr/include")
    # Verify that the generated error message is nicely formatted.

    expected_message = dedent(
        '''\
Some compiler or linker flags were provided without quoting their arguments,
which now causes spack to try to parse the *next* argument as a spec component
such as a variant instead of an additional compiler or linker flag. If the
intent was to set multiple flags, try quoting them together as described below.

Possible flag quotation errors (with the correctly-quoted version after the =>):
(1) cflags=-Os -pipe => cflags="-Os -pipe"
(2) cflags=-I /usr/include => cflags="-I /usr/include"'''
    )

    assert expected_message in str(cm.value)

    # Verify that the same unquoted cflags report is generated in the error message even
    # if it fails during concretization, not just during parsing.
    with pytest.raises(spack.error.SpackError) as cm:
        spec("gcc cflags=-Os -pipe")
    cm = str(cm.value)
    assert cm.startswith(
        'trying to set variant "pipe" in package "gcc", but the package has no such variant'
    )
    assert cm.endswith('(1) cflags=-Os -pipe => cflags="-Os -pipe"')


def test_spec_yaml():
    output = spec("--yaml", "mpileaks")
@@ -17,7 +17,7 @@
pytestmark = pytest.mark.not_on_windows("does not run on windows")


def test_undevelop(tmpdir, config, mock_packages, mutable_mock_env_path):
def test_undevelop(tmpdir, mutable_config, mock_packages, mutable_mock_env_path):
    # setup environment
    envdir = tmpdir.mkdir("env")
    with envdir.as_cwd():
@@ -46,7 +46,7 @@ def test_undevelop(tmpdir, config, mock_packages, mutable_mock_env_path):
    assert not after.satisfies("dev_path=*")


def test_undevelop_nonexistent(tmpdir, config, mock_packages, mutable_mock_env_path):
def test_undevelop_nonexistent(tmpdir, mutable_config, mock_packages, mutable_mock_env_path):
    # setup environment
    envdir = tmpdir.mkdir("env")
    with envdir.as_cwd():
@@ -253,8 +253,8 @@ def test_get_compiler_link_paths_load_env(working_env, monkeypatch, tmpdir):
    gcc = str(tmpdir.join("gcc"))
    with open(gcc, "w") as f:
        f.write(
            """#!/bin/bash
if [[ $ENV_SET == "1" && $MODULE_LOADED == "1" ]]; then
            """#!/bin/sh
if [ "$ENV_SET" = "1" ] && [ "$MODULE_LOADED" = "1" ]; then
  echo '"""
            + no_flag_output
            + """'
@@ -699,8 +699,8 @@ def test_compiler_get_real_version(working_env, monkeypatch, tmpdir):
    gcc = str(tmpdir.join("gcc"))
    with open(gcc, "w") as f:
        f.write(
            """#!/bin/bash
if [[ $CMP_ON == "1" ]]; then
            """#!/bin/sh
if [ "$CMP_ON" = "1" ]; then
  echo "$CMP_VER"
fi
"""
@@ -745,8 +745,8 @@ def test_compiler_get_real_version_fails(working_env, monkeypatch, tmpdir):
    gcc = str(tmpdir.join("gcc"))
    with open(gcc, "w") as f:
        f.write(
            """#!/bin/bash
if [[ $CMP_ON == "1" ]]; then
            """#!/bin/sh
if [ "$CMP_ON" = "1" ]; then
  echo "$CMP_VER"
fi
"""
@@ -799,7 +799,7 @@ def test_compiler_flags_use_real_version(working_env, monkeypatch, tmpdir):
    gcc = str(tmpdir.join("gcc"))
    with open(gcc, "w") as f:
        f.write(
            """#!/bin/bash
            """#!/bin/sh
echo "4.4.4"
"""
        )  # Version for which c++11 flag is -std=c++0x
@@ -1817,12 +1817,14 @@ def test_git_ref_version_succeeds_with_unknown_version(self, git_ref):

    @pytest.mark.regression("31484")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_installed_externals_are_reused(self, mutable_database, repo_with_changing_recipe):
    def test_installed_externals_are_reused(
        self, mutable_database, repo_with_changing_recipe, tmp_path
    ):
        """Test that external specs that are in the DB can be reused."""
        external_conf = {
            "changing": {
                "buildable": False,
                "externals": [{"spec": "changing@1.0", "prefix": "/usr"}],
                "externals": [{"spec": "changing@1.0", "prefix": str(tmp_path)}],
            }
        }
        spack.config.set("packages", external_conf)
@@ -1847,12 +1849,12 @@ def test_installed_externals_are_reused(self, mutable_database, repo_with_changi

    @pytest.mark.regression("31484")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_user_can_select_externals_with_require(self, mutable_database):
    def test_user_can_select_externals_with_require(self, mutable_database, tmp_path):
        """Test that users have means to select an external even in presence of reusable specs."""
        external_conf = {
            "mpi": {"buildable": False},
            "multi-provider-mpi": {
                "externals": [{"spec": "multi-provider-mpi@2.0.0", "prefix": "/usr"}]
                "externals": [{"spec": "multi-provider-mpi@2.0.0", "prefix": str(tmp_path)}]
            },
        }
        spack.config.set("packages", external_conf)
@@ -2434,7 +2436,8 @@ def test_reusable_externals_match(mock_packages, tmpdir):
    spec.external_path = tmpdir.strpath
    spec.external_modules = ["mpich/4.1"]
    spec._mark_concrete()
    assert spack.solver.asp._is_reusable_external(
    assert spack.solver.asp._is_reusable(
        spec,
        {
            "mpich": {
                "externals": [
@@ -2442,7 +2445,7 @@ def test_reusable_externals_match(mock_packages, tmpdir):
                ]
            }
        },
        spec,
        local=False,
    )


@@ -2451,7 +2454,8 @@ def test_reusable_externals_match_virtual(mock_packages, tmpdir):
    spec.external_path = tmpdir.strpath
    spec.external_modules = ["mpich/4.1"]
    spec._mark_concrete()
    assert spack.solver.asp._is_reusable_external(
    assert spack.solver.asp._is_reusable(
        spec,
        {
            "mpi": {
                "externals": [
@@ -2459,7 +2463,7 @@ def test_reusable_externals_match_virtual(mock_packages, tmpdir):
                ]
            }
        },
        spec,
        local=False,
    )


@@ -2468,7 +2472,8 @@ def test_reusable_externals_different_prefix(mock_packages, tmpdir):
    spec.external_path = "/other/path"
    spec.external_modules = ["mpich/4.1"]
    spec._mark_concrete()
    assert not spack.solver.asp._is_reusable_external(
    assert not spack.solver.asp._is_reusable(
        spec,
        {
            "mpich": {
                "externals": [
@@ -2476,7 +2481,7 @@ def test_reusable_externals_different_prefix(mock_packages, tmpdir):
                ]
            }
        },
        spec,
        local=False,
    )


@@ -2486,7 +2491,8 @@ def test_reusable_externals_different_modules(mock_packages, tmpdir, modules):
    spec.external_path = tmpdir.strpath
    spec.external_modules = modules
    spec._mark_concrete()
    assert not spack.solver.asp._is_reusable_external(
    assert not spack.solver.asp._is_reusable(
        spec,
        {
            "mpich": {
                "externals": [
@@ -2494,7 +2500,7 @@ def test_reusable_externals_different_modules(mock_packages, tmpdir, modules):
                ]
            }
        },
        spec,
        local=False,
    )


@@ -2502,6 +2508,8 @@ def test_reusable_externals_different_spec(mock_packages, tmpdir):
    spec = Spec("mpich@4.1%gcc@13.1.0~debug build_system=generic arch=linux-ubuntu23.04-zen2")
    spec.external_path = tmpdir.strpath
    spec._mark_concrete()
    assert not spack.solver.asp._is_reusable_external(
        {"mpich": {"externals": [{"spec": "mpich@4.1 +debug", "prefix": tmpdir.strpath}]}}, spec
    assert not spack.solver.asp._is_reusable(
        spec,
        {"mpich": {"externals": [{"spec": "mpich@4.1 +debug", "prefix": tmpdir.strpath}]}},
        local=False,
    )
lib/spack/spack/test/concretize_compiler_runtimes.py (new file, 42 lines)
@@ -0,0 +1,42 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

import pytest

import spack.paths
import spack.repo
import spack.solver.asp
import spack.spec
from spack.version import Version

pytestmark = [pytest.mark.only_clingo("Original concretizer does not support compiler runtimes")]


@pytest.fixture
def runtime_repo(config):
    repo = os.path.join(spack.paths.repos_path, "compiler_runtime.test")
    with spack.repo.use_repositories(repo) as mock_repo:
        yield mock_repo


@pytest.fixture
def enable_runtimes():
    original = spack.solver.asp.WITH_RUNTIME
    spack.solver.asp.WITH_RUNTIME = True
    yield
    spack.solver.asp.WITH_RUNTIME = original


def test_correct_gcc_runtime_is_injected_as_dependency(runtime_repo, enable_runtimes):
    s = spack.spec.Spec("a%gcc@10.2.1 ^b%gcc@4.5.0").concretized()
    a, b = s["a"], s["b"]

    # Both a and b should depend on the same gcc-runtime directly
    assert a.dependencies("gcc-runtime") == b.dependencies("gcc-runtime")

    # And the gcc-runtime version should be that of the newest gcc used in the dag.
    assert a["gcc-runtime"].version == Version("10.2.1")
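The assertion above pins the injected gcc-runtime to the newest gcc used anywhere in the DAG. A toy illustration of that selection rule, with version parsing simplified to integer tuples (an assumption, not Spack's Version semantics):

def newest_runtime_version(compiler_versions):
    """Pick the gcc-runtime version: the highest gcc version in the DAG."""
    return max(compiler_versions, key=lambda v: tuple(int(p) for p in v.split(".")))

# a is built with gcc@10.2.1, b with gcc@4.5.0 -> the shared runtime follows 10.2.1
assert newest_runtime_version(["10.2.1", "4.5.0"]) == "10.2.1"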
@@ -16,8 +16,8 @@
version_error_messages = [
    "Cannot satisfy 'fftw@:1.0' and 'fftw@1.1:",
    " required because quantum-espresso depends on fftw@:1.0",
    " required because quantum-espresso ^fftw@1.1: requested from CLI",
    " required because quantum-espresso ^fftw@1.1: requested from CLI",
    " required because quantum-espresso ^fftw@1.1: requested explicitly",
    " required because quantum-espresso ^fftw@1.1: requested explicitly",
]

external_error_messages = [
@@ -30,15 +30,15 @@
    " which was not satisfied"
    ),
    " 'quantum-espresso+veritas' required",
    " required because quantum-espresso+veritas requested from CLI",
    " required because quantum-espresso+veritas requested explicitly",
]

variant_error_messages = [
    "'fftw' required multiple values for single-valued variant 'mpi'",
    " Requested '~mpi' and '+mpi'",
    " required because quantum-espresso depends on fftw+mpi when +invino",
    " required because quantum-espresso+invino ^fftw~mpi requested from CLI",
    " required because quantum-espresso+invino ^fftw~mpi requested from CLI",
    " required because quantum-espresso+invino ^fftw~mpi requested explicitly",
    " required because quantum-espresso+invino ^fftw~mpi requested explicitly",
]

external_config = {
@@ -16,6 +16,7 @@
import spack.version
from spack.solver.asp import InternalConcretizerError, UnsatisfiableSpecError
from spack.spec import Spec
from spack.test.conftest import create_test_repo
from spack.util.url import path_to_file_url

pytestmark = [
@@ -92,30 +93,13 @@ class U(Package):


@pytest.fixture
def create_test_repo(tmpdir, mutable_config):
    repo_path = str(tmpdir)
    repo_yaml = tmpdir.join("repo.yaml")
    with open(str(repo_yaml), "w") as f:
        f.write(
            """\
repo:
  namespace: testcfgrequirements
"""
        )

    packages_dir = tmpdir.join("packages")
    for pkg_name, pkg_str in [_pkgx, _pkgy, _pkgv, _pkgt, _pkgu]:
        pkg_dir = packages_dir.ensure(pkg_name, dir=True)
        pkg_file = pkg_dir.join("package.py")
        with open(str(pkg_file), "w") as f:
            f.write(pkg_str)

    yield spack.repo.Repo(repo_path)
def _create_test_repo(tmpdir, mutable_config):
    yield create_test_repo(tmpdir, [_pkgx, _pkgy, _pkgv, _pkgt, _pkgu])


@pytest.fixture
def test_repo(create_test_repo, monkeypatch, mock_stage):
    with spack.repo.use_repositories(create_test_repo) as mock_repo_path:
def test_repo(_create_test_repo, monkeypatch, mock_stage):
    with spack.repo.use_repositories(_create_test_repo) as mock_repo_path:
        yield mock_repo_path


@@ -530,7 +514,7 @@ def test_oneof_ordering(concretize_scope, test_repo):
    assert s2.satisfies("@2.5")


def test_reuse_oneof(concretize_scope, create_test_repo, mutable_database, fake_installs):
def test_reuse_oneof(concretize_scope, _create_test_repo, mutable_database, fake_installs):
    conf_str = """\
packages:
  y:
@@ -538,7 +522,7 @@ def test_reuse_oneof(concretize_scope, create_test_repo, mutable_database, fake_
      - one_of: ["@2.5", "%gcc"]
"""

    with spack.repo.use_repositories(create_test_repo):
    with spack.repo.use_repositories(_create_test_repo):
        s1 = Spec("y@2.5%gcc").concretized()
        s1.package.do_install(fake=True, explicit=True)
@@ -502,6 +502,34 @@ def test_parse_install_tree(config_settings, expected, mutable_config):
    assert projections == expected_proj


def test_change_or_add(mutable_config, mock_packages):
    spack.config.add("packages:a:version:['1.0']", scope="user")

    spack.config.add("packages:b:version:['1.1']", scope="system")

    class ChangeTest:
        def __init__(self, pkg_name, new_version):
            self.pkg_name = pkg_name
            self.new_version = new_version

        def find_fn(self, section):
            return self.pkg_name in section

        def change_fn(self, section):
            pkg_section = section.get(self.pkg_name, {})
            pkg_section["version"] = self.new_version
            section[self.pkg_name] = pkg_section

    change1 = ChangeTest("b", ["1.2"])
    spack.config.change_or_add("packages", change1.find_fn, change1.change_fn)
    assert "b" not in mutable_config.get("packages", scope="user")
    assert mutable_config.get("packages")["b"]["version"] == ["1.2"]

    change2 = ChangeTest("c", ["1.0"])
    spack.config.change_or_add("packages", change2.find_fn, change2.change_fn)
    assert "c" in mutable_config.get("packages", scope="user")
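As the test above shows, `spack.config.change_or_add` takes a section name, a predicate that reports whether a scope's section already contains the entry, and a mutator that performs the change; when no existing scope matches, the change is added to the default scope instead. A standalone mimic of that calling convention over plain dicts (the scope-ordering details are assumptions):

def change_or_add(scopes, section, find_fn, change_fn, default_scope="user"):
    """Mimic: mutate the first scope whose section satisfies find_fn;
    otherwise apply the change in the default scope."""
    for name, cfg in scopes.items():  # assume dict ordered by priority
        if find_fn(cfg.get(section, {})):
            change_fn(cfg.setdefault(section, {}))
            return name
    change_fn(scopes[default_scope].setdefault(section, {}))
    return default_scope

scopes = {"user": {"packages": {"a": {"version": ["1.0"]}}},
          "system": {"packages": {"b": {"version": ["1.1"]}}}}
# "b" exists only in the system scope, so the change lands there, not in "user"
scope = change_or_add(scopes, "packages",
                      lambda s: "b" in s,
                      lambda s: s.setdefault("b", {}).update(version=["1.2"]))
assert scope == "system" and scopes["system"]["packages"]["b"]["version"] == ["1.2"]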
@pytest.mark.not_on_windows("Padding unsupported on Windows")
@pytest.mark.parametrize(
    "config_settings,expected",
@@ -44,6 +44,7 @@
import spack.paths
import spack.platforms
import spack.repo
import spack.solver.asp
import spack.stage
import spack.store
import spack.subprocess_context
@@ -1949,21 +1950,22 @@ def pytest_runtest_setup(item):
        pytest.skip(*not_on_windows_marker.args)


class MockPool:
    def map(self, func, args):
        return [func(a) for a in args]

    def starmap(self, func, args):
        return [func(*a) for a in args]

    def __enter__(self):
        return self

    def __exit__(self, *args):
        pass


@pytest.fixture(scope="function")
def disable_parallel_buildcache_push(monkeypatch):
    class MockPool:
        def map(self, func, args):
            return [func(a) for a in args]

        def starmap(self, func, args):
            return [func(*a) for a in args]

        def __enter__(self):
            return self

        def __exit__(self, *args):
            pass

    monkeypatch.setattr(spack.cmd.buildcache, "_make_pool", MockPool)


@@ -1976,3 +1978,24 @@ def mock_modules_root(tmp_path, monkeypatch):
    """Sets the modules root to a temporary directory, to avoid polluting configuration scopes."""
    fn = functools.partial(_root_path, path=str(tmp_path))
    monkeypatch.setattr(spack.modules.common, "root_path", fn)


def create_test_repo(tmpdir, pkg_name_content_tuples):
    repo_path = str(tmpdir)
    repo_yaml = tmpdir.join("repo.yaml")
    with open(str(repo_yaml), "w") as f:
        f.write(
            """\
repo:
  namespace: testcfgrequirements
"""
        )

    packages_dir = tmpdir.join("packages")
    for pkg_name, pkg_str in pkg_name_content_tuples:
        pkg_dir = packages_dir.ensure(pkg_name, dir=True)
        pkg_file = pkg_dir.join("package.py")
        with open(str(pkg_file), "w") as f:
            f.write(pkg_str)

    return spack.repo.Repo(repo_path)
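With the helper hoisted into `conftest.py`, individual test modules only declare `(package_name, package_py_contents)` tuples, as the diff-command and requirements tests elsewhere in this diff do. A usage sketch with an illustrative package body (note the helper hardcodes the `testcfgrequirements` namespace shown above):

import pytest
import spack.repo
from spack.test.conftest import create_test_repo

_toy = (
    "toy",
    """\
class Toy(Package):
    version("1.0")
""",
)

@pytest.fixture
def _create_test_repo(tmpdir, mutable_config):
    yield create_test_repo(tmpdir, [_toy])

@pytest.fixture
def test_repo(_create_test_repo, monkeypatch, mock_stage):
    with spack.repo.use_repositories(_create_test_repo) as mock_repo_path:
        yield mock_repo_path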
@@ -19,6 +19,7 @@
import spack.compilers
import spack.config
import spack.cray_manifest as cray_manifest
import spack.solver.asp
import spack.spec
import spack.store
from spack.cray_manifest import compiler_from_entry, entries_to_specs
@@ -488,3 +489,23 @@ def test_find_external_nonempty_default_manifest_dir(
    spack.cmd.external._collect_and_consume_cray_manifest_files(ignore_default_dir=False)
    specs = spack.store.STORE.db.query("hwloc")
    assert any(x.dag_hash() == "hwlocfakehashaaa" for x in specs)


def test_reusable_externals_cray_manifest(
    tmpdir, mutable_config, mock_packages, temporary_store, manifest_content
):
    """The concretizer should be able to reuse specs imported from a manifest without an
    externals config entry in packages.yaml"""
    with tmpdir.as_cwd():
        with open("external-db.json", "w") as f:
            json.dump(manifest_content, f)
        cray_manifest.read(path="external-db.json", apply_updates=True)

    # Get any imported spec
    spec = temporary_store.db.query_local()[0]

    # Reusable if imported locally
    assert spack.solver.asp._is_reusable(spec, packages={}, local=True)

    # If cray manifest entries end up in a build cache somehow, they are not reusable
    assert not spack.solver.asp._is_reusable(spec, packages={}, local=False)
@@ -321,3 +321,18 @@ def inner():
        """
    ).format(__file__)
)


def test_grouped_exception_base_type():
    h = llnl.util.lang.GroupedExceptionHandler()

    with h.forward("catch-runtime-error", RuntimeError):
        raise NotImplementedError()

    with pytest.raises(NotImplementedError):
        with h.forward("catch-value-error", ValueError):
            raise NotImplementedError()

    message = h.grouped_message(with_tracebacks=False)
    assert "catch-runtime-error" in message
    assert "catch-value-error" not in message
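The new test pins down the `forward(name, base_type)` contract: exceptions that are instances of the given base type are captured into the group (NotImplementedError subclasses RuntimeError, hence the first case), while anything else propagates. A minimal mimic of that contract, not the actual `llnl.util.lang` implementation:

from contextlib import contextmanager

class MiniGroupedHandler:
    """Mimic of GroupedExceptionHandler's capture contract."""

    def __init__(self):
        self.errors = []  # (context_name, exception) pairs

    @contextmanager
    def forward(self, name, base_type=Exception):
        try:
            yield
        except base_type as exc:  # matching exceptions are captured
            self.errors.append((name, exc))
        # anything not matching base_type propagates normally

    def grouped_message(self):
        return "\n".join(f"{name}: {exc!r}" for name, exc in self.errors)

h = MiniGroupedHandler()
with h.forward("catch-runtime-error", RuntimeError):
    raise NotImplementedError()  # captured: subclass of RuntimeError
assert "catch-runtime-error" in h.grouped_message()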
@@ -11,6 +11,7 @@
import os
from contextlib import contextmanager

import spack.environment as ev
import spack.oci.opener
from spack.binary_distribution import gzip_compressed_tarfile
from spack.main import SpackCommand
@@ -20,6 +21,8 @@

buildcache = SpackCommand("buildcache")
mirror = SpackCommand("mirror")
env = SpackCommand("env")
install = SpackCommand("install")


@contextmanager
@@ -53,6 +56,46 @@ def test_buildcache_push_command(mutable_database, disable_parallel_buildcache_p
    assert os.path.exists(os.path.join(spec.prefix, "bin", "mpileaks"))


def test_buildcache_tag(
    install_mockery, mock_fetch, mutable_mock_env_path, disable_parallel_buildcache_push
):
    """Tests whether we can create an OCI image from a full environment with multiple roots."""
    env("create", "test")
    with ev.read("test"):
        install("--add", "libelf")
        install("--add", "trivial-install-test-package")

    registry = InMemoryOCIRegistry("example.com")

    with oci_servers(registry):
        mirror("add", "oci-test", "oci://example.com/image")

        with ev.read("test"):
            buildcache("push", "--tag", "full_env", "oci-test")

        name = ImageReference.from_string("example.com/image:full_env")

        with ev.read("test") as e:
            specs = e.all_specs()

        manifest, config = get_manifest_and_config(name)

        # without a base image, we should have one layer per spec
        assert len(manifest["layers"]) == len(specs)

        # Now create yet another tag, but with just a single selected spec as root. This should
        # also test the case where Spack doesn't have to upload any binaries, it just has to create
        # a new tag.
        libelf = next(s for s in specs if s.name == "libelf")
        with ev.read("test"):
            # Get libelf spec
            buildcache("push", "--tag", "single_spec", "oci-test", libelf.format("libelf{/hash}"))

        name = ImageReference.from_string("example.com/image:single_spec")
        manifest, config = get_manifest_and_config(name)
        assert len(manifest["layers"]) == 1


def test_buildcache_push_with_base_image_command(
    mutable_database, tmpdir, disable_parallel_buildcache_push
):
@@ -1288,6 +1288,17 @@ def test_call_dag_hash_on_old_dag_hash_spec(mock_packages, default_mock_concreti
        spec.package_hash()


def test_spec_trim(mock_packages, config):
    top = Spec("dt-diamond").concretized()
    top.trim("dt-diamond-left")
    remaining = set(x.name for x in top.traverse())
    assert set(["dt-diamond", "dt-diamond-right", "dt-diamond-bottom"]) == remaining

    top.trim("dt-diamond-right")
    remaining = set(x.name for x in top.traverse())
    assert set(["dt-diamond"]) == remaining


@pytest.mark.regression("30861")
def test_concretize_partial_old_dag_hash_spec(mock_packages, config):
    # create an "old" spec with no package hash
@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import itertools
import os
import re
@@ -9,6 +10,7 @@

import pytest

import spack.cmd
import spack.platforms.test
import spack.spec
import spack.variant
@@ -203,7 +205,8 @@ def _specfile_for(spec_str, filename):
        "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1~qt_4 debug=2 ^stackwalker@8.1_1e",
    ),
    (
        "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags=-O3 +debug~qt_4 ^stackwalker@8.1_1e",  # noqa: E501
        "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags=-O3 +debug~qt_4 "
        "^stackwalker@8.1_1e",
        [
            Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="mvapich_foo"),
            Token(TokenType.DEPENDENCY, value="^"),
@@ -217,7 +220,8 @@ def _specfile_for(spec_str, filename):
            Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="stackwalker"),
            Token(TokenType.VERSION, value="@8.1_1e"),
        ],
        'mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags="-O3" +debug~qt_4 ^stackwalker@8.1_1e',  # noqa: E501
        "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags=-O3 +debug~qt_4 "
        "^stackwalker@8.1_1e",
    ),
    # Specs containing YAML or JSON in the package name
    (
@@ -424,7 +428,7 @@ def _specfile_for(spec_str, filename):
    compiler_with_version_range("%gcc@10.1.0,12.2.1:"),
    compiler_with_version_range("%gcc@:8.4.3,10.2.1:12.1.0"),
    # Special key value arguments
    ("dev_path=*", [Token(TokenType.KEY_VALUE_PAIR, value="dev_path=*")], "dev_path=*"),
    ("dev_path=*", [Token(TokenType.KEY_VALUE_PAIR, value="dev_path=*")], "dev_path='*'"),
    (
        "dev_path=none",
        [Token(TokenType.KEY_VALUE_PAIR, value="dev_path=none")],
@@ -444,33 +448,28 @@ def _specfile_for(spec_str, filename):
    (
        "cflags=a=b=c",
        [Token(TokenType.KEY_VALUE_PAIR, value="cflags=a=b=c")],
        'cflags="a=b=c"',
        "cflags='a=b=c'",
    ),
    (
        "cflags=a=b=c",
        [Token(TokenType.KEY_VALUE_PAIR, value="cflags=a=b=c")],
        'cflags="a=b=c"',
        "cflags='a=b=c'",
    ),
    (
        "cflags=a=b=c+~",
        [Token(TokenType.KEY_VALUE_PAIR, value="cflags=a=b=c+~")],
        'cflags="a=b=c+~"',
        "cflags='a=b=c+~'",
    ),
    (
        "cflags=-Wl,a,b,c",
        [Token(TokenType.KEY_VALUE_PAIR, value="cflags=-Wl,a,b,c")],
        'cflags="-Wl,a,b,c"',
        "cflags=-Wl,a,b,c",
    ),
    # Multi quoted
    (
        "cflags=''-Wl,a,b,c''",
        [Token(TokenType.KEY_VALUE_PAIR, value="cflags=''-Wl,a,b,c''")],
        'cflags="-Wl,a,b,c"',
    ),
    (
        'cflags=="-O3 -g"',
        [Token(TokenType.PROPAGATED_KEY_VALUE_PAIR, value='cflags=="-O3 -g"')],
        'cflags=="-O3 -g"',
        "cflags=='-O3 -g'",
    ),
    # Whitespace is allowed in version lists
    ("@1.2:1.4 , 1.6 ", [Token(TokenType.VERSION, value="@1.2:1.4 , 1.6")], "@1.2:1.4,1.6"),
@@ -484,22 +483,6 @@ def _specfile_for(spec_str, filename):
        ],
        "a@1:",
    ),
    (
        "@1.2: develop = foo",
        [
            Token(TokenType.VERSION, value="@1.2:"),
            Token(TokenType.KEY_VALUE_PAIR, value="develop = foo"),
        ],
        "@1.2: develop=foo",
    ),
    (
        "@1.2:develop = foo",
        [
            Token(TokenType.VERSION, value="@1.2:"),
            Token(TokenType.KEY_VALUE_PAIR, value="develop = foo"),
        ],
        "@1.2: develop=foo",
    ),
    (
        "% intel @ 12.1:12.6 + debug",
        [
@@ -587,8 +570,8 @@ def _specfile_for(spec_str, filename):
)
def test_parse_single_spec(spec_str, tokens, expected_roundtrip):
    parser = SpecParser(spec_str)
    assert parser.tokens() == tokens
    assert str(parser.next_spec()) == expected_roundtrip
    assert tokens == parser.tokens()
    assert expected_roundtrip == str(parser.next_spec())


@pytest.mark.parametrize(
@@ -654,20 +637,80 @@ def test_parse_multiple_specs(text, tokens, expected_specs):
    assert str(total_parser.next_spec()) == str(single_spec_parser.next_spec())


@pytest.mark.parametrize(
    "args,expected",
    [
        # Test that CLI-quoted flags/variant values are preserved
        (["zlib", "cflags=-O3 -g", "+bar", "baz"], "zlib cflags='-O3 -g' +bar baz"),
        # Test that CLI-quoted propagated flags/variant values are preserved
        (["zlib", "cflags==-O3 -g", "+bar", "baz"], "zlib cflags=='-O3 -g' +bar baz"),
        # An entire string passed on the CLI with embedded quotes also works
        (["zlib cflags='-O3 -g' +bar baz"], "zlib cflags='-O3 -g' +bar baz"),
        # Entire string *without* quoted flags splits -O3/-g (-g interpreted as a variant)
        (["zlib cflags=-O3 -g +bar baz"], "zlib cflags=-O3 +bar~g baz"),
        # If the entirety of "-O3 -g +bar baz" is quoted on the CLI, it's all taken as flags
        (["zlib", "cflags=-O3 -g +bar baz"], "zlib cflags='-O3 -g +bar baz'"),
        # If the string doesn't start with key=, it needs internal quotes for flags
        (["zlib", " cflags=-O3 -g +bar baz"], "zlib cflags=-O3 +bar~g baz"),
        # Internal quotes for quoted CLI args are considered part of *one* arg
        (["zlib", 'cflags="-O3 -g" +bar baz'], """zlib cflags='"-O3 -g" +bar baz'"""),
        # Use double quotes if internal single quotes are present
        (["zlib", "cflags='-O3 -g' +bar baz"], '''zlib cflags="'-O3 -g' +bar baz"'''),
        # Use single quotes and escape single quotes with internal single and double quotes
        (["zlib", "cflags='-O3 -g' \"+bar baz\""], 'zlib cflags="\'-O3 -g\' \\"+bar baz\\""'),
        # Ensure that empty strings are handled correctly on CLI
        (["zlib", "ldflags=", "+pic"], "zlib+pic"),
        # These flags are assumed to be quoted by the shell, but the space doesn't matter because
        # flags are space-separated.
        (["zlib", "ldflags= +pic"], "zlib ldflags='+pic'"),
        (["ldflags= +pic"], "ldflags='+pic'"),
        # If the name is not a flag name, the space is preserved verbatim, because variant values
        # are comma-separated.
        (["zlib", "foo= +pic"], "zlib foo=' +pic'"),
        (["foo= +pic"], "foo=' +pic'"),
        # You can ensure no quotes are added by parse_specs() by starting your string with space,
        # but you still need to quote empty strings properly.
        ([" ldflags= +pic"], SpecTokenizationError),
        ([" ldflags=", "+pic"], SpecTokenizationError),
        ([" ldflags='' +pic"], "+pic"),
        ([" ldflags=''", "+pic"], "+pic"),
        # Ensure that empty strings are handled properly in quoted strings
        (["zlib ldflags='' +pic"], "zlib+pic"),
        # Ensure that $ORIGIN is handled correctly
        (["zlib", "ldflags=-Wl,-rpath=$ORIGIN/_libs"], "zlib ldflags='-Wl,-rpath=$ORIGIN/_libs'"),
        # Ensure that passing escaped quotes on the CLI raises a tokenization error
        (["zlib", '"-g', '-O2"'], SpecTokenizationError),
    ],
)
def test_cli_spec_roundtrip(args, expected):
    if inspect.isclass(expected) and issubclass(expected, BaseException):
        with pytest.raises(expected):
            spack.cmd.parse_specs(args)
        return

    specs = spack.cmd.parse_specs(args)
    output_string = " ".join(str(spec) for spec in specs)
    assert expected == output_string


@pytest.mark.parametrize(
    "text,expected_in_error",
    [
        ("x@@1.2", "x@@1.2\n ^^^^^"),
        ("y ^x@@1.2", "y ^x@@1.2\n ^^^^^"),
        ("x@1.2::", "x@1.2::\n ^"),
        ("x::", "x::\n ^^"),
        ("x@@1.2", r"x@@1.2\n ^"),
        ("y ^x@@1.2", r"y ^x@@1.2\n ^"),
        ("x@1.2::", r"x@1.2::\n ^"),
        ("x::", r"x::\n ^^"),
        ("cflags=''-Wl,a,b,c''", r"cflags=''-Wl,a,b,c''\n ^ ^ ^ ^^"),
        ("@1.2: develop = foo", r"@1.2: develop = foo\n ^^"),
        ("@1.2:develop = foo", r"@1.2:develop = foo\n ^^"),
    ],
)
def test_error_reporting(text, expected_in_error):
    parser = SpecParser(text)
    with pytest.raises(SpecTokenizationError) as exc:
        parser.tokens()
    assert expected_in_error in str(exc), parser.tokens()

    assert expected_in_error in str(exc), parser.tokens()


@pytest.mark.parametrize(
@@ -734,3 +734,40 @@ def test_conditional_value_comparable_to_bool(other):
    value = spack.variant.Value("98", when="@1.0")
    comparison = value == other
    assert comparison is False


@pytest.mark.regression("40405")
def test_wild_card_valued_variants_equivalent_to_str():
    """
    There was a bug prior to PR 40406 in that variants with wildcard values "*"
    were being overwritten in the variant constructor.
    The expected/appropriate behavior is for it to behave like value=str and this
    test demonstrates that the two are now equivalent
    """
    str_var = spack.variant.Variant(
        name="str_var",
        default="none",
        values=str,
        description="str variant",
        multi=True,
        validator=None,
    )

    wild_var = spack.variant.Variant(
        name="wild_var",
        default="none",
        values="*",
        description="* variant",
        multi=True,
        validator=None,
    )

    several_arbitrary_values = ("doe", "re", "mi")
    # "*" case
    wild_output = wild_var.make_variant(several_arbitrary_values)
    wild_var.validate_or_raise(wild_output)
    # str case
    str_output = str_var.make_variant(several_arbitrary_values)
    str_var.validate_or_raise(str_output)
    # equivalence each instance already validated
    assert str_output.value == wild_output.value
@@ -251,7 +251,7 @@ def traverse_depth_first_edges_generator(edges, visitor, post_order=False, root=
    neighbors = [EdgeAndDepth(edge=n, depth=edge.depth + 1) for n in visitor.neighbors(edge)]

    # This extra branch is just for efficiency.
    if len(neighbors) >= 0:
    if len(neighbors) > 0:
        for item in traverse_depth_first_edges_generator(
            neighbors, visitor, post_order, root, depth
        ):
@@ -3,18 +3,14 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import re
import sys
from contextlib import contextmanager
from typing import Callable

from llnl.util.lang import nullcontext

import spack.build_environment
import spack.config
import spack.error
import spack.spec
import spack.util.environment as environment
import spack.util.prefix as prefix
from spack import traverse
from spack.context import Context

@@ -70,22 +66,6 @@ def unconditional_environment_modifications(view):
    return env


@contextmanager
def projected_prefix(*specs: spack.spec.Spec, projection: Callable[[spack.spec.Spec], str]):
    """Temporarily replace every Spec's prefix with projection(s)"""
    prefixes = dict()
    for s in traverse.traverse_nodes(specs, key=lambda s: s.dag_hash()):
        if s.external:
            continue
        prefixes[s.dag_hash()] = s.prefix
        s.prefix = prefix.Prefix(projection(s))

    yield

    for s in traverse.traverse_nodes(specs, key=lambda s: s.dag_hash()):
        s.prefix = prefixes.get(s.dag_hash(), s.prefix)


def environment_modifications_for_specs(
    *specs: spack.spec.Spec, view=None, set_package_py_globals: bool = True
):
@@ -102,26 +82,36 @@ def environment_modifications_for_specs(
    been built on a different but compatible OS)
    """
    env = environment.EnvironmentModifications()
    topo_ordered = traverse.traverse_nodes(specs, root=True, deptype=("run", "link"), order="topo")
    topo_ordered = list(
        traverse.traverse_nodes(specs, root=True, deptype=("run", "link"), order="topo")
    )

    # Static environment changes (prefix inspections)
    for s in reversed(topo_ordered):
        static = environment.inspect_path(
            s.prefix, prefix_inspections(s.platform), exclude=environment.is_system_path
        )
        env.extend(static)

    # Dynamic environment changes (setup_run_environment etc)
    setup_context = spack.build_environment.SetupContext(*specs, context=Context.RUN)
    if set_package_py_globals:
        setup_context.set_all_package_py_globals()
    env.extend(setup_context.get_env_modifications())

    # Apply view projections if any.
    if view:
        maybe_projected = projected_prefix(*specs, projection=view.get_projection_for_spec)
    else:
        maybe_projected = nullcontext()

    with maybe_projected:
        # Static environment changes (prefix inspections)
        for s in reversed(list(topo_ordered)):
            static = environment.inspect_path(
                s.prefix, prefix_inspections(s.platform), exclude=environment.is_system_path
            )
            env.extend(static)

        # Dynamic environment changes (setup_run_environment etc)
        setup_context = spack.build_environment.SetupContext(*specs, context=Context.RUN)
        if set_package_py_globals:
            setup_context.set_all_package_py_globals()
        dynamic = setup_context.get_env_modifications()
        env.extend(dynamic)
    prefix_to_prefix = {
        s.prefix: view.get_projection_for_spec(s)
        for s in reversed(topo_ordered)
        if not s.external
    }
    # Avoid empty regex if all external
    if not prefix_to_prefix:
        return env
    prefix_regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix.keys()))
    for mod in env.env_modifications:
        if isinstance(mod, environment.NameValueModifier):
            mod.value = prefix_regex.sub(lambda m: prefix_to_prefix[m.group(0)], mod.value)

    return env
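The rewrite above drops the temporary prefix-swapping context manager in favor of a single regex pass that maps install prefixes to view projections inside the already-computed environment values. A standalone sketch of that substitution step, with the modification values simplified to plain strings and hypothetical paths:

import re

prefix_to_prefix = {
    "/opt/spack/zlib-1.3": "/views/default",
    "/opt/spack/cmake-3.27": "/views/default",
}

# One alternation over all prefixes; each match is rewritten to its
# projected location in the view.
prefix_regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix))

value = "/opt/spack/zlib-1.3/lib:/opt/spack/cmake-3.27/bin"
projected = prefix_regex.sub(lambda m: prefix_to_prefix[m.group(0)], value)
print(projected)  # /views/default/lib:/views/default/bin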
@@ -19,6 +19,7 @@

import spack.directives
import spack.error as error
import spack.parser

special_variant_values = [None, "none", "*"]

@@ -74,7 +75,7 @@ def isa_type(v):

        self.single_value_validator = isa_type

    if callable(values):
    elif callable(values):
        # If 'values' is a callable, assume it is a single value
        # validator and reset the values to be explicit during debug
        self.single_value_validator = values
@@ -399,13 +400,12 @@ def __contains__(self, item):
        return item in self._value

    def __repr__(self):
        cls = type(self)
        return "{0.__name__}({1}, {2})".format(cls, repr(self.name), repr(self._original_value))
        return f"{type(self).__name__}({repr(self.name)}, {repr(self._original_value)})"

    def __str__(self):
        if self.propagate:
            return "{0}=={1}".format(self.name, ",".join(str(x) for x in self.value))
        return "{0}={1}".format(self.name, ",".join(str(x) for x in self.value))
        delim = "==" if self.propagate else "="
        values = spack.parser.quote_if_needed(",".join(str(v) for v in self.value))
        return f"{self.name}{delim}{values}"


class MultiValuedVariant(AbstractVariant):
@@ -443,15 +443,14 @@ def append(self, value):
        self._original_value = ",".join(self._value)

    def __str__(self):
        # Special-case patches to not print the full 64 character hashes
        # Special-case patches to not print the full 64 character sha256
        if self.name == "patches":
            values_str = ",".join(x[:7] for x in self.value)
        else:
            values_str = ",".join(str(x) for x in self.value)

        if self.propagate:
            return "{0}=={1}".format(self.name, values_str)
        return "{0}={1}".format(self.name, values_str)
        delim = "==" if self.propagate else "="
        return f"{self.name}{delim}{spack.parser.quote_if_needed(values_str)}"


class SingleValuedVariant(AbstractVariant):
@@ -467,9 +466,8 @@ def _value_setter(self, value):
        self._value = str(self._value[0])

    def __str__(self):
        if self.propagate:
            return "{0}=={1}".format(self.name, self.value)
        return "{0}={1}".format(self.name, self.value)
        delim = "==" if self.propagate else "="
        return f"{self.name}{delim}{spack.parser.quote_if_needed(self.value)}"

    @implicit_variant_conversion
    def satisfies(self, other):
Some files were not shown because too many files have changed in this diff.