Compare commits

403 commits: `fix/fewer-...` → `packages/u...`
**.github/workflows/audit.yaml** — 2 changes

```diff
@@ -61,7 +61,7 @@ jobs:
         ./share/spack/qa/validate_last_exit.ps1
         spack -d audit externals
         ./share/spack/qa/validate_last_exit.ps1
-    - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
+    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
      if: ${{ inputs.with_coverage == 'true' }}
      with:
        flags: unittests,audits
```
**.github/workflows/bootstrap.yml** — 12 changes

```diff
@@ -53,10 +53,10 @@ jobs:
     runs-on: ${{ matrix.runner }}
     strategy:
       matrix:
-        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
+        runner: ['macos-13', 'macos-14', "ubuntu-latest", "windows-latest"]
     steps:
       - name: Setup macOS
-        if: ${{ matrix.runner != 'ubuntu-latest' }}
+        if: ${{ matrix.runner != 'ubuntu-latest' && matrix.runner != 'windows-latest' }}
         run: |
           brew install cmake bison tree
       - name: Checkout
@@ -67,13 +67,17 @@ jobs:
         with:
           python-version: "3.12"
       - name: Bootstrap clingo
+        env:
+          SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
+          SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
+          USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
         run: |
-          source share/spack/setup-env.sh
+          ${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
           spack bootstrap disable github-actions-v0.5
           spack bootstrap disable github-actions-v0.4
           spack external find --not-buildable cmake bison
           spack -d solve zlib
-          tree ~/.spack/bootstrap/store/
+          tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/

   gnupg-sources:
     runs-on: ${{ matrix.runner }}
```
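The per-OS values added above use the GitHub Actions `condition && a || b` ternary idiom to pick one of two strings inside a single matrix job. A minimal standalone sketch of the same pattern (a hypothetical workflow, not part of this diff):

```yaml
jobs:
  demo:
    runs-on: ${{ matrix.runner }}
    strategy:
      matrix:
        runner: [ubuntu-latest, windows-latest]
    env:
      # evaluates to 'ps1' on the Windows runner and 'sh' everywhere else
      SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
    steps:
      - run: echo "setup script extension is ${{ env.SCRIPT_EXT }}"
```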
**.github/workflows/build-containers.yml** — 13 changes

```diff
@@ -40,8 +40,7 @@ jobs:
           # 1: Platforms to build for
           # 2: Base image (e.g. ubuntu:22.04)
           dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
-                       [centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'],
-                       [centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'],
+                       [centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
                        [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
                        [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
                        [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
@@ -88,16 +87,16 @@ jobs:
           fi

       - name: Upload Dockerfile
-        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
+        uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b
         with:
           name: dockerfiles_${{ matrix.dockerfile[0] }}
           path: dockerfiles

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3
+        uses: docker/setup-qemu-action@5927c834f5b4fdf503fca6f4c7eccda82949e1ee

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb
+        uses: docker/setup-buildx-action@4fd812986e6c8c2a69e18311145f9371337f27d4

       - name: Log in to GitHub Container Registry
         uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
@@ -114,7 +113,7 @@ jobs:
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0
+        uses: docker/build-push-action@1a162644f9a7e87d8f4b053101d1d9a712edc18c
         with:
           context: dockerfiles/${{ matrix.dockerfile[0] }}
           platforms: ${{ matrix.dockerfile[1] }}
@@ -127,7 +126,7 @@ jobs:
     needs: deploy-images
     steps:
       - name: Merge Artifacts
-        uses: actions/upload-artifact/merge@65462800fd760344b1a7b4382951275a0abb4808
+        uses: actions/upload-artifact/merge@0b2256b8c012f0828dc542b3febcab082c67f72b
         with:
           name: dockerfiles
           pattern: dockerfiles_*
```
**.github/workflows/ci.yaml** — 14 changes

```diff
@@ -53,6 +53,13 @@ jobs:
             - 'var/spack/repos/builtin/packages/clingo/**'
             - 'var/spack/repos/builtin/packages/python/**'
             - 'var/spack/repos/builtin/packages/re2c/**'
+            - 'var/spack/repos/builtin/packages/gnupg/**'
+            - 'var/spack/repos/builtin/packages/libassuan/**'
+            - 'var/spack/repos/builtin/packages/libgcrypt/**'
+            - 'var/spack/repos/builtin/packages/libgpg-error/**'
+            - 'var/spack/repos/builtin/packages/libksba/**'
+            - 'var/spack/repos/builtin/packages/npth/**'
+            - 'var/spack/repos/builtin/packages/pinentry/**'
             - 'lib/spack/**'
             - 'share/spack/**'
             - '.github/workflows/bootstrap.yml'
@@ -77,13 +84,8 @@ jobs:
     needs: [ prechecks, changes ]
     uses: ./.github/workflows/unit_tests.yaml
     secrets: inherit
-  windows:
-    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
-    needs: [ prechecks ]
-    uses: ./.github/workflows/windows_python.yml
-    secrets: inherit
   all:
-    needs: [ windows, unit-tests, bootstrap ]
+    needs: [ unit-tests, bootstrap ]
     runs-on: ubuntu-latest
     steps:
       - name: Success
```
**.github/workflows/style/requirements.txt** — 2 changes

```diff
@@ -1,6 +1,6 @@
 black==24.4.2
 clingo==5.7.1
-flake8==7.0.0
+flake8==7.1.0
 isort==5.13.2
 mypy==1.8.0
 types-six==1.16.21.20240513
```
**.github/workflows/unit_tests.yaml** — 39 changes

```diff
@@ -91,7 +91,7 @@ jobs:
           UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
         run: |
           share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
+      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
         with:
           flags: unittests,linux,${{ matrix.concretizer }}
           token: ${{ secrets.CODECOV_TOKEN }}
@@ -124,7 +124,7 @@ jobs:
           COVERAGE: true
         run: |
           share/spack/qa/run-shell-tests
-      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
+      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
         with:
           flags: shelltests,linux
           token: ${{ secrets.CODECOV_TOKEN }}
@@ -185,7 +185,7 @@ jobs:
           SPACK_TEST_SOLVER: clingo
         run: |
           share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
+      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
         with:
           flags: unittests,linux,clingo
           token: ${{ secrets.CODECOV_TOKEN }}
@@ -223,8 +223,39 @@ jobs:
           $(which spack) solve zlib
           common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
           $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
+      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
         with:
           flags: unittests,macos
           token: ${{ secrets.CODECOV_TOKEN }}
           verbose: true
+  # Run unit tests on Windows
+  windows:
+    defaults:
+      run:
+        shell:
+          powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
+    runs-on: windows-latest
+    steps:
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
+        with:
+          fetch-depth: 0
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
+        with:
+          python-version: 3.9
+      - name: Install Python packages
+        run: |
+          python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
+      - name: Create local develop
+        run: |
+          ./.github/workflows/setup_git.ps1
+      - name: Unit Test
+        run: |
+          spack unit-test -x --verbose --cov --cov-config=pyproject.toml
+          ./share/spack/qa/validate_last_exit.ps1
+          coverage combine -a
+          coverage xml
+      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+        with:
+          flags: unittests,windows
+          token: ${{ secrets.CODECOV_TOKEN }}
+          verbose: true
```
**.github/workflows/windows_python.yml** — 83 changes (file deleted; its jobs were folded into `unit_tests.yaml` above)

```diff
@@ -1,83 +0,0 @@
-name: windows
-
-on:
-  workflow_call:
-
-concurrency:
-  group: windows-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
-  cancel-in-progress: true
-
-defaults:
-  run:
-    shell:
-      powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
-jobs:
-  unit-tests:
-    runs-on: windows-latest
-    steps:
-      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
-        with:
-          fetch-depth: 0
-      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
-        with:
-          python-version: 3.9
-      - name: Install Python packages
-        run: |
-          python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
-      - name: Create local develop
-        run: |
-          ./.github/workflows/setup_git.ps1
-      - name: Unit Test
-        run: |
-          spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
-          ./share/spack/qa/validate_last_exit.ps1
-          coverage combine -a
-          coverage xml
-      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
-        with:
-          flags: unittests,windows
-          token: ${{ secrets.CODECOV_TOKEN }}
-          verbose: true
-  unit-tests-cmd:
-    runs-on: windows-latest
-    steps:
-      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
-        with:
-          fetch-depth: 0
-      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
-        with:
-          python-version: 3.9
-      - name: Install Python packages
-        run: |
-          python -m pip install --upgrade pip pywin32 setuptools coverage pytest-cov clingo
-      - name: Create local develop
-        run: |
-          ./.github/workflows/setup_git.ps1
-      - name: Command Unit Test
-        run: |
-          spack unit-test -x --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
-          ./share/spack/qa/validate_last_exit.ps1
-          coverage combine -a
-          coverage xml
-      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
-        with:
-          flags: unittests,windows
-          token: ${{ secrets.CODECOV_TOKEN }}
-          verbose: true
-  build-abseil:
-    runs-on: windows-latest
-    steps:
-      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
-        with:
-          fetch-depth: 0
-      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
-        with:
-          python-version: 3.9
-      - name: Install Python packages
-        run: |
-          python -m pip install --upgrade pip pywin32 setuptools coverage
-      - name: Build Test
-        run: |
-          spack compiler find
-          spack -d external find cmake ninja
-          spack -d install abseil-cpp
```
**CHANGELOG.md** — 321 changes. The hunk `@@ -1,3 +1,324 @@` prepends the v0.22.0 release notes:

# v0.22.0 (2024-05-12)

`v0.22.0` is a major feature release.

## Features in this release

1. **Compiler dependencies**

   We are in the process of making compilers proper dependencies in Spack, and a number
   of changes in `v0.22` support that effort. You may notice nodes in your dependency
   graphs for compiler runtime libraries like `gcc-runtime` or `libgfortran`, and you
   may notice that Spack graphs now include `libc`. We've also begun moving compiler
   configuration from `compilers.yaml` to `packages.yaml` to make it consistent with
   other externals. We are trying to do this with the least disruption possible, so
   your existing `compilers.yaml` files should still work. We expect to be done with
   this transition by the `v0.23` release in November.

   * #41104: Packages compiled with `%gcc` on Linux, macOS and FreeBSD now depend on a
     new package `gcc-runtime`, which contains a copy of the shared compiler runtime
     libraries. This enables gcc runtime libraries to be installed and relocated when
     using a build cache. When building minimal Spack-generated container images it is
     no longer necessary to install libgfortran, libgomp etc. using the system package
     manager.

   * #42062: Packages compiled with `%oneapi` now depend on a new package
     `intel-oneapi-runtime`. This is similar to `gcc-runtime`, and the runtimes can
     provide virtuals and compilers can inject dependencies on virtuals into compiled
     packages. This allows us to model library soname compatibility and allows
     compilers like `%oneapi` to provide virtuals like `sycl` (which can also be
     provided by standalone libraries). Note that until we have an agreement in place
     with Intel, Intel packages are marked `redistribute(source=False, binary=False)`
     and must be downloaded outside of Spack.

   * #43272: Changes to the optimization criteria of the solver improve the hit rate of
     buildcaches by a fair amount. The solver uses more relaxed compatibility rules and will
     not try to strictly match compilers or targets of reused specs. Users can still
     enforce the previous strict behavior with `require:` sections in `packages.yaml`.
     Note that to enforce correct linking, Spack will *not* reuse old `%gcc` and
     `%oneapi` specs that do not have the runtime libraries as a dependency.

   * #43539: Spack will reuse specs built with compilers that are *not* explicitly
     configured in `compilers.yaml`. Because we can now keep runtime libraries in the
     build cache, we do not require you to also have a locally configured compiler to
     *use* the runtime libraries. This improves reuse in buildcaches and avoids
     conflicts with OS updates that happen underneath Spack.

   * #43190: Binary compatibility on `linux` is now based on the `libc` version,
     instead of on the `os` tag. Spack builds now detect the host `libc` (`glibc` or
     `musl`) and add it as an implicit external node in the dependency graph. Binaries
     with a `libc` with the same name and a version less than or equal to that of the
     detected `libc` can be reused. This applies only on `linux`, not on `macos` or
     `Windows`.

   * #43464: Each package that can provide a compiler is now detectable using `spack
     external find`. External packages defining compiler paths are effectively used as
     compilers, and `spack external find -t compiler` can be used as a substitute for
     `spack compiler find`. More details on this transition are in
     [the docs](https://spack.readthedocs.io/en/latest/getting_started.html#manual-compiler-configuration).
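   As a quick illustration of that last bullet, the new detection path can stand in for the older compiler-discovery command (commands only, both taken from the notes above):

   ```console
   $ spack external find -t compiler   # detect packages that can provide compilers
   $ spack compiler find               # older entry point it substitutes for
   ```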
2. **Improved `spack find` UI for Environments**

   If you're working in an environment, you likely care about:

   * What are the roots
   * Which ones are installed / not installed
   * What's been added that still needs to be concretized

   We've tweaked `spack find` in environments to show this information much more
   clearly. Installation status is shown next to each root, so you can see what is
   installed. Roots are also shown in bold in the list of installed packages. There is
   also a new option for `spack find -r` / `--only-roots` that will only show env
   roots, if you don't want to look at all the installed specs.

   More details in #42334.
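   For example, listing just the roots of the active environment uses the new flag in either form (the environment name is a placeholder):

   ```console
   $ spack env activate myenv
   $ spack find -r             # only show environment roots
   $ spack find --only-roots   # equivalent long form
   ```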
3. **Improved command-line string quoting**

   We are making some breaking changes to how Spack parses specs on the CLI in order to
   respect shell quoting instead of trying to fight it. If you (sadly) had to write
   something like this on the command line:

   ```
   spack install zlib cflags=\"-O2 -g\"
   ```

   That will now result in an error, but you can now write what you probably expected
   to work in the first place:

   ```
   spack install zlib cflags="-O2 -g"
   ```

   Quoted strings can also now include special characters, so you can supply flags like:

   ```
   spack install zlib ldflags='-Wl,-rpath=$ORIGIN/_libs'
   ```

   To reduce ambiguity in parsing, we now require that you *not* put spaces around `=`
   and `==` when specifying flags or variants. This would not have broken before but
   will now result in an error:

   ```
   spack install zlib cflags = "-O2 -g"
   ```

   More details and discussion in #30634.
4. **Revert default `spack install` behavior to `--reuse`**

   We changed the default concretizer behavior from `--reuse` to `--reuse-deps` in
   #30990 (in `v0.20`), which meant that *every* `spack install` invocation would
   attempt to build a new version of the requested package / any environment roots.
   While this is a common ask for *upgrading* and for *developer* workflows, we don't
   think it should be the default for a package manager.

   We are going to try to stick to this policy:
   1. Prioritize reuse and build as little as possible by default.
   2. Only upgrade or install duplicates if they are explicitly asked for, or if there
      is a known security issue that necessitates an upgrade.

   With the install command you now have three options:

   * `--reuse` (default): reuse as many existing installations as possible.
   * `--reuse-deps` / `--fresh-roots`: upgrade (freshen) roots but reuse dependencies if possible.
   * `--fresh`: install fresh versions of requested packages (roots) and their dependencies.

   We've also introduced `--fresh-roots` as an alias for `--reuse-deps` to make it more clear
   that it may give you fresh versions. More details in #41302 and #43988.
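   A concrete sketch of the three modes (the package name is an arbitrary example):

   ```console
   $ spack install --reuse zlib        # default: reuse as much as possible
   $ spack install --fresh-roots zlib  # alias for --reuse-deps: fresh root, reused dependencies
   $ spack install --fresh zlib        # fresh versions of the root and all dependencies
   ```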
5. **More control over reused specs**

   You can now control which packages to reuse and how. There is a new
   `concretizer:reuse` config option, which accepts the following properties:

   - `roots`: `true` to reuse roots, `false` to reuse just dependencies
   - `exclude`: list of constraints used to select which specs *not* to reuse
   - `include`: list of constraints used to select which specs *to* reuse
   - `from`: list of sources for reused specs (some combination of `local`,
     `buildcache`, or `external`)

   For example, to reuse only specs compiled with GCC, you could write:

   ```yaml
   concretizer:
     reuse:
       roots: true
       include:
       - "%gcc"
   ```

   Or, if `openmpi` must be used from externals, and it must be the only external used:

   ```yaml
   concretizer:
     reuse:
       roots: true
       from:
       - type: local
         exclude: ["openmpi"]
       - type: buildcache
         exclude: ["openmpi"]
       - type: external
         include: ["openmpi"]
   ```
6. **New `redistribute()` directive**

   Some packages can't be redistributed in source or binary form. We need an explicit
   way to say that in a package.

   Now there is a `redistribute()` directive so that package authors can write:

   ```python
   class MyPackage(Package):
       redistribute(source=False, binary=False)
   ```

   Like other directives, this works with `when=`:

   ```python
   class MyPackage(Package):
       # 12.0 and higher are proprietary
       redistribute(source=False, binary=False, when="@12.0:")

       # can't redistribute when we depend on some proprietary dependency
       redistribute(source=False, binary=False, when="^proprietary-dependency")
   ```

   More in #20185.
7. **New `conflict:` and `prefer:` syntax for package preferences**

   Previously, you could express conflicts and preferences in `packages.yaml` through
   some contortions with `require:`:

   ```yaml
   packages:
     zlib-ng:
       require:
       - one_of: ["%clang", "@:"]    # conflict on %clang
       - any_of: ["+shared", "@:"]   # strong preference for +shared
   ```

   You can now use `conflict:` and `prefer:` for a much more readable configuration:

   ```yaml
   packages:
     zlib-ng:
       conflict:
       - "%clang"
       prefer:
       - "+shared"
   ```

   See [the documentation](https://spack.readthedocs.io/en/latest/packages_yaml.html#conflicts-and-strong-preferences)
   and #41832 for more details.
8. **`include_concrete` in environments**

   You may want to build on the *concrete* contents of another environment without
   changing that environment. You can now include the concrete specs from another
   environment's `spack.lock` with `include_concrete`:

   ```yaml
   spack:
     specs: []
     concretizer:
       unify: true
     include_concrete:
     - /path/to/environment1
     - /path/to/environment2
   ```

   Now, when *this* environment is concretized, it will bring in the already concrete
   specs from `environment1` and `environment2`, and build on top of them without
   changing them. This is useful if you have phased deployments, where old deployments
   should not be modified but you want to use as many of them as possible. More details
   in #33768.
9. **`python-venv` isolation**

   Spack has unique requirements for Python because it:
   1. installs every package in its own independent directory, and
   2. allows users to register *external* python installations.

   External installations may contain their own installed packages that can interfere
   with Spack installations, and some distributions (Debian and Ubuntu) even change the
   `sysconfig` in ways that alter the installation layout of installed Python packages
   (e.g., with the addition of a `/local` prefix on Debian or Ubuntu). To isolate Spack
   from these and other issues, we now insert a small `python-venv` package in between
   `python` and packages that need to install Python code. This isolates Spack's build
   environment, isolates Spack from any issues with an external python, and resolves a
   large number of issues we've had with Python installations.

   See #40773 for further details.
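   One way to observe the new node is to inspect any concretized package that installs Python code (illustrative; `py-pip` is an arbitrary choice):

   ```console
   $ spack spec py-pip | grep python-venv   # the venv node sits between python and the package
   ```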
## New commands, options, and directives

* Allow packages to be pushed to build cache after install from source (#42423)
* `spack develop`: stage build artifacts in same root as non-dev builds (#41373)
* Don't delete `spack develop` build artifacts after install (#43424)
* `spack find`: add options for local/upstream only (#42999)
* `spack logs`: print log files for packages (either partially built or installed) (#42202)
* `patch`: support reversing patches (#43040)
* `develop`: add `-b`/`--build-directory` option to set the `build_directory` package attribute (#39606)
* `spack list`: add `--namespace` / `--repo` option (#41948)
* directives: add `checked_by` field to `license()`, add some license checks
* `spack gc`: add options for environments and build dependencies (#41731)
* Add `--create` to `spack env activate` (#40896)
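Two of the new entry points in use (arguments are placeholders):

```console
$ spack logs zlib                  # print logs of an installed or partially built package
$ spack env activate --create dev  # activate "dev", creating it first if needed
```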
## Performance improvements

* environment.py: fix excessive re-reads (#43746)
* ruamel yaml: fix quadratic complexity bug (#43745)
* Refactor to improve `spec format` speed (#43712)
* Do not acquire a write lock on the env post install if no views (#43505)
* asp.py: fewer calls to `spec.copy()` (#43715)
* spec.py: early return in `__str__`
* avoid `jinja2` import at startup unless needed (#43237)
## Other new features of note

* `archspec`: update to `v0.2.4`: support for Windows, bugfixes for `neoverse-v1` and
  `neoverse-v2` detection.
* `spack config get`/`blame`: with no args, show entire config
* `spack env create <env>`: dir if dir-like (#44024)
* ASP-based solver: update os compatibility for macOS (#43862)
* Add handling of custom ssl certs in urllib ops (#42953)
* Add ability to rename environments (#43296)
* Add config option and compiler support to reuse across OS's (#42693)
* Support for prereleases (#43140)
* Only reuse externals when configured (#41707)
* Environments: Add support for including views (#42250)

## Binary caches

* Build cache: make signed/unsigned a mirror property (#41507)
* tools stack

## Removals, deprecations, and syntax changes

* remove `dpcpp` compiler and package (#43418)
* spack load: remove --only argument (#42120)

## Notable Bugfixes

* repo.py: drop deleted packages from provider cache (#43779)
* Allow `+` in module file names (#41999)
* `cmd/python`: use runpy to allow multiprocessing in scripts (#41789)
* Show extension commands with spack -h (#41726)
* Support environment variable expansion inside module projections (#42917)
* Alert user to failed concretizations (#42655)
* shell: fix zsh color formatting for PS1 in environments (#39497)
* spack mirror create --all: include patches (#41579)

## Spack community stats

* 7,994 total packages; 525 since `v0.21.0`
* 178 new Python packages, 5 new R packages
* 358 people contributed to this release
  * 344 committers to packages
  * 45 committers to core

# v0.21.2 (2024-03-01)

## Bugfixes
```diff
@@ -22,4 +22,4 @@
 #
 # This is compatible across platforms.
 #
-exec /usr/bin/env spack python "$@"
+exec spack python "$@"
```
```diff
@@ -188,25 +188,27 @@ if NOT "%_sp_args%"=="%_sp_args:--help=%" (
 goto :end_switch

 :case_load
-:: If args contain --sh, --csh, or -h/--help: just execute.
-if defined _sp_args (
-  if NOT "%_sp_args%"=="%_sp_args:--help=%" (
-    goto :default_case
-  ) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
-    goto :default_case
-  ) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
-    goto :default_case
-  )
+if NOT defined _sp_args (
+  exit /B 0
+)
+
+:: If args contain --bat, or -h/--help: just execute.
+if NOT "%_sp_args%"=="%_sp_args:--help=%" (
+  goto :default_case
+) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
+  goto :default_case
+) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
+  goto :default_case
+) else if NOT "%_sp_args%"=="%_sp_args:--list=%" (
+  goto :default_case
 )

 for /f "tokens=* USEBACKQ" %%I in (
-  `python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`) do %%I
+  `python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`
+) do %%I

 goto :end_switch

 :case_unload
 goto :case_load

 :default_case
 python "%spack%" %_sp_flags% %_sp_subcommand% %_sp_args%
 goto :end_switch
```
**etc/spack/defaults/linux/packages.yaml** — new file, 3 additions

```diff
@@ -0,0 +1,3 @@
+packages:
+  iconv:
+    require: [libiconv]
```
```diff
@@ -60,7 +60,7 @@ packages:
     szip: [libaec, libszip]
     tbb: [intel-tbb]
     unwind: [libunwind]
-    uuid: [util-linux-uuid, libuuid]
+    uuid: [util-linux-uuid, util-linux+uuid, libuuid]
     xxd: [xxd-standalone, vim]
     yacc: [bison, byacc]
     ziglang: [zig]
```
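The `require:` form used in the new Linux defaults file above is the same mechanism a site can use to pin any virtual to a single provider, e.g. (hypothetical user config):

```yaml
# ~/.spack/packages.yaml (illustrative)
packages:
  uuid:
    require: [libuuid]   # always satisfy the uuid virtual with libuuid
```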
```diff
@@ -203,12 +203,9 @@ The OS that are currently supported are summarized in the table below:
    * - Ubuntu 24.04
      - ``ubuntu:24.04``
      - ``spack/ubuntu-noble``
-   * - CentOS 7
-     - ``centos:7``
-     - ``spack/centos7``
-   * - CentOS Stream
-     - ``quay.io/centos/centos:stream``
-     - ``spack/centos-stream``
+   * - CentOS Stream9
+     - ``quay.io/centos/centos:stream9``
+     - ``spack/centos-stream9``
    * - openSUSE Leap
      - ``opensuse/leap``
      - ``spack/leap15``
```
```diff
@@ -2344,6 +2344,27 @@ you set ``parallel`` to ``False`` at the package level, then each call
 to ``make()`` will be sequential by default, but packagers can call
 ``make(parallel=True)`` to override it.

+Note that the ``--jobs`` option works out of the box for all standard
+build systems. If you are using a non-standard build system instead, you
+can use the variable ``make_jobs`` to extract the number of jobs specified
+by the ``--jobs`` option:
+
+.. code-block:: python
+   :emphasize-lines: 7, 11
+   :linenos:
+
+   class Xios(Package):
+       ...
+       def install(self, spec, prefix):
+           ...
+           options = [
+               ...
+               '--jobs', str(make_jobs),
+           ]
+           ...
+           make_xios = Executable("./make_xios")
+           make_xios(*options)
+
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Install-level build parallelism
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
```
```diff
@@ -5173,12 +5194,6 @@ installed executable. The check is implemented as follows:
        reframe = Executable(self.prefix.bin.reframe)
        reframe("-l")

-.. warning::
-
-   The API for adding tests is not yet considered stable and may change
-   in future releases.
-
-
 """"""""""""""""""""""""""""""""
 Checking build-time test results
 """"""""""""""""""""""""""""""""
```
```diff
@@ -5216,38 +5231,42 @@ be left in the build stage directory as illustrated below:
 Stand-alone tests
 ^^^^^^^^^^^^^^^^^

-While build-time tests are integrated with the build process, stand-alone
+While build-time tests are integrated with the installation process, stand-alone
 tests are expected to run days, weeks, even months after the software is
 installed. The goal is to provide a mechanism for gaining confidence that
 packages work as installed **and** *continue* to work as the underlying
 software evolves. Packages can add and inherit stand-alone tests. The
-`spack test`` command is used to manage stand-alone testing.
+``spack test`` command is used for stand-alone testing.

-.. note::
+.. admonition:: Stand-alone test methods should complete within a few minutes.

    Execution speed is important since these tests are intended to quickly
-   assess whether installed specs work on the system. Consequently, they
-   should run relatively quickly -- as in on the order of at most a few
-   minutes -- while ideally executing all, or at least key aspects of the
-   installed software.
+   assess whether installed specs work on the system. Spack cannot spare
+   resources for more extensive testing of packages included in CI stacks.

-.. note::
-
-   Failing stand-alone tests indicate problems with the installation and,
-   therefore, there is no reason to proceed with more resource-intensive
-   tests until those have been investigated.
-
-   Passing stand-alone tests indicate that more thorough testing, such
-   as running extensive unit or regression tests, or tests that run at
-   scale can proceed without wasting resources on a problematic installation.
+   Consequently, stand-alone tests should run relatively quickly -- as in
+   on the order of at most a few minutes -- while testing at least key aspects
+   of the installed software. Save more extensive testing for other tools.

 Tests are defined in the package using methods with names beginning ``test_``.
 This allows Spack to support multiple independent checks, or parts. Files
 needed for testing, such as source, data, and expected outputs, may be saved
 from the build and or stored with the package in the repository. Regardless
 of origin, these files are automatically copied to the spec's test stage
-directory prior to execution of the test method(s). Spack also provides some
-helper functions to facilitate processing.
+directory prior to execution of the test method(s). Spack also provides helper
+functions to facilitate common processing.
+
+.. tip::
+
+   **The status of stand-alone tests can be used to guide follow-up testing efforts.**
+
+   Passing stand-alone tests justify performing more thorough testing, such
+   as running extensive unit or regression tests or tests that run at scale,
+   when available. These tests are outside of the scope of Spack packaging.
+
+   Failing stand-alone tests indicate problems with the installation and,
+   therefore, no reason to proceed with more resource-intensive tests until
+   the failures have been investigated.

 .. _configure-test-stage:
```
```diff
@@ -5255,30 +5274,26 @@ helper functions to facilitate processing.
 Configuring the test stage directory
 """"""""""""""""""""""""""""""""""""

-Stand-alone tests utilize a test stage directory for building, running,
-and tracking results in the same way Spack uses a build stage directory.
-The default test stage root directory, ``~/.spack/test``, is defined in
-:ref:`etc/spack/defaults/config.yaml <config-yaml>`. This location is
-customizable by adding or changing the ``test_stage`` path in the high-level
-``config`` of the appropriate ``config.yaml`` file such that:
+Stand-alone tests utilize a test stage directory to build, run, and track
+tests in the same way Spack uses a build stage directory to install software.
+The default test stage root directory, ``$HOME/.spack/test``, is defined in
+:ref:`config.yaml <config-yaml>`. This location is customizable by adding or
+changing the ``test_stage`` path such that:

 .. code-block:: yaml

    config:
      test_stage: /path/to/test/stage

-Packages can use the ``self.test_suite.stage`` property to access this setting.
-Other package properties that provide access to spec-specific subdirectories
-and files are described in :ref:`accessing staged files <accessing-files>`.
+Packages can use the ``self.test_suite.stage`` property to access the path.

-.. note::
+.. admonition:: Each spec being tested has its own test stage directory.

-   The test stage path is the root directory for the **entire suite**.
-   In other words, it is the root directory for **all specs** being
-   tested by the ``spack test run`` command. Each spec gets its own
-   stage subdirectory. Use ``self.test_suite.test_dir_for_spec(self.spec)``
-   to access the spec-specific test stage directory.
+   The ``config:test_stage`` option is the path to the root of a
+   **test suite**'s stage directories.
+
+   Other package properties that provide paths to spec-specific subdirectories
+   and files are described in :ref:`accessing-files`.

 .. _adding-standalone-tests:
```
```diff
@@ -5291,61 +5306,144 @@ Test recipes are defined in the package using methods with names beginning
 Each method has access to the information Spack tracks on the package, such
 as options, compilers, and dependencies, supporting the customization of tests
 to the build. Standard python ``assert`` statements and other error reporting
-mechanisms are available. Such exceptions are automatically caught and reported
+mechanisms can be used. These exceptions are automatically caught and reported
 as test failures.

-Each test method is an implicit test part named by the method and whose
-purpose is the method's docstring. Providing a purpose gives context for
-aiding debugging. A test method may contain embedded test parts. Spack
-outputs the test name and purpose prior to running each test method and
-any embedded test parts. For example, ``MyPackage`` below provides two basic
-examples of installation tests: ``test_always_fails`` and ``test_example``.
-As the name indicates, the first always fails. The second simply runs the
-installed example.
+Each test method is an *implicit test part* named by the method. Its purpose
+is the method's docstring. Providing a meaningful purpose for the test gives
+context that can aid debugging. Spack outputs both the name and purpose at the
+start of test execution so it's also important that the docstring/purpose be
+brief.
+
+.. tip::
+
+   We recommend naming test methods so it is clear *what* is being tested.
+   For example, if a test method is building and or running an executable
+   called ``example``, then call the method ``test_example``. This, together
+   with a similarly meaningful test purpose, will aid test comprehension,
+   debugging, and maintainability.
+
+Stand-alone tests run in an environment that provides access to information
+on the installed software, such as build options, dependencies, and compilers.
+Build options and dependencies are accessed using the same spec checks used
+by build recipes. Examples of checking :ref:`variant settings <variants>` and
+:ref:`spec constraints <testing-specs>` can be found at the provided links.
+
+.. admonition:: Spack automatically sets up the test stage directory and environment.
+
+   Spack automatically creates the test stage directory and copies
+   relevant files *prior to* running tests. It can also ensure build
+   dependencies are available **if** necessary.
+
+   The path to the test stage is configurable (see :ref:`configure-test-stage`).
+
+   Files that Spack knows to copy are those saved from the build (see
+   :ref:`cache_extra_test_sources`) and those added to the package repository
+   (see :ref:`cache_custom_files`).
+
+   Spack will use the value of the ``test_requires_compiler`` property to
+   determine whether it needs to also set up build dependencies (see
+   :ref:`test-build-tests`).
+
+The ``MyPackage`` package below provides two basic test examples:
+``test_example`` and ``test_example2``. The first runs the installed
+``example`` and ensures its output contains an expected string. The second
+runs ``example2`` without checking output so is only concerned with confirming
+the executable runs successfully. If the installed spec is not expected to have
+``example2``, then the check at the top of the method will raise a special
+``SkipTest`` exception, which is captured to facilitate reporting skipped test
+parts to tools like CDash.

 .. code-block:: python

    class MyPackage(Package):
       ...

-      def test_always_fails(self):
-          """use assert to always fail"""
-          assert False
-
       def test_example(self):
-          """run installed example"""
+          """ensure installed example works"""
+          expected = "Done."
           example = which(self.prefix.bin.example)
-          example()
+
+          # Capture stdout and stderr from running the Executable
+          # and check that the expected output was produced.
+          out = example(output=str.split, error=str.split)
+          assert expected in out, f"Expected '{expected}' in the output"
+
+      def test_example2(self):
+          """run installed example2"""
+          if self.spec.satisfies("@:1.0"):
+              # Raise SkipTest to ensure flagging the test as skipped for
+              # test reporting purposes.
+              raise SkipTest("Test is only available for v1.1 on")
+
+          example2 = which(self.prefix.bin.example2)
+          example2()

 Output showing the identification of each test part after running the tests
 is illustrated below.

 .. code-block:: console

-   $ spack test run --alias mypackage mypackage@1.0
+   $ spack test run --alias mypackage mypackage@2.0
    ==> Spack test mypackage
    ...
    $ spack test results -l mypackage
    ==> Results for test suite 'mypackage':
    ...
-   ==> [2023-03-10-16:03:56.625204] test: test_always_fails: use assert to always fail
+   ==> [2024-03-10-16:03:56.625439] test: test_example: ensure installed example works
    ...
-   FAILED
-   ==> [2023-03-10-16:03:56.625439] test: test_example: run installed example
+   PASSED: MyPackage::test_example
+   ==> [2024-03-10-16:03:56.625439] test: test_example2: run installed example2
    ...
-   PASSED
+   PASSED: MyPackage::test_example2

-.. note::
+.. admonition:: Do NOT implement tests that must run in the installation prefix.

-   If ``MyPackage`` were a recipe for a library, the tests should build
-   an example or test program that is then executed.
+   Use of the package spec's installation prefix for building and running
+   tests is **strongly discouraged**. Doing so causes permission errors for
+   shared spack instances *and* facilities that install the software in
+   read-only file systems or directories.
+
+   Instead, start these test methods by explicitly copying the needed files
+   from the installation prefix to the test stage directory. Note the test
+   stage directory is the current directory when the test is executed with
+   the ``spack test run`` command.

-A test method can include test parts using the ``test_part`` context manager.
-Each part is treated as an independent check to allow subsequent test parts
-to execute even after a test part fails.
+.. admonition:: Test methods for library packages should build test executables.

-.. _test-part:
+   Stand-alone tests for library packages *should* build test executables
+   that utilize the *installed* library. Doing so ensures the tests follow
+   a similar build process that users of the library would follow.
+
+   For more information on how to do this, see :ref:`test-build-tests`.
+
+.. tip::
+
+   If you want to see more examples from packages with stand-alone tests, run
+   ``spack pkg grep "def\stest" | sed "s/\/package.py.*//g" | sort -u``
+   from the command line to get a list of the packages.
+
+.. _adding-standalone-test-parts:
+
+"""""""""""""""""""""""""""""
+Adding stand-alone test parts
+"""""""""""""""""""""""""""""
+
+Sometimes dependencies between steps of a test lend themselves to being
+broken into parts. Tracking the pass/fail status of each part may aid
+debugging. Spack provides a ``test_part`` context manager for use within
+test methods.
+
+Each test part is independently run, tracked, and reported. Test parts are
+executed in the order they appear. If one fails, subsequent test parts are
+still performed even if they would also fail. This allows tools like CDash
+to track and report the status of test parts across runs. The pass/fail status
+of the enclosing test is derived from the statuses of the embedded test parts.
+
+.. admonition:: Test method and test part names **must** be unique.
+
+   Test results reporting requires that test methods and embedded test parts
+   within a package have unique names.

 The signature for ``test_part`` is:
```
@@ -5367,40 +5465,68 @@ where each argument has the following meaning:
|
||||
* ``work_dir`` is the path to the directory in which the test will run.
|
||||
|
||||
The default of ``None``, or ``"."``, corresponds to the the spec's test
|
||||
stage (i.e., ``self.test_suite.test_dir_for_spec(self.spec)``.
|
||||
stage (i.e., ``self.test_suite.test_dir_for_spec(self.spec)``).
|
||||
|
||||
.. admonition:: Tests should **not** run under the installation directory.
|
||||
.. admonition:: Start test part names with the name of the enclosing test.
|
||||
|
||||
Use of the package spec's installation directory for building and running
|
||||
tests is **strongly** discouraged. Doing so causes permission errors for
|
||||
shared spack instances *and* facilities that install the software in
|
||||
read-only file systems or directories.
|
||||
We **highly recommend** starting the names of test parts with the name
|
||||
of the enclosing test. Doing so helps with the comprehension, readability
|
||||
and debugging of test results.
|
||||
|
||||
Suppose ``MyPackage`` actually installs two examples we want to use for tests.
|
||||
These checks can be implemented as separate checks or, as illustrated below,
|
||||
embedded test parts.
|
||||
Suppose ``MyPackage`` installs multiple executables that need to run in a
|
||||
specific order since the outputs from one are inputs of others. Further suppose
|
||||
we want to add an integration test that runs the executables in order. We can
|
||||
accomplish this goal by implementing a stand-alone test method consisting of
|
||||
test parts for each executable as follows:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class MyPackage(Package):
|
||||
...
|
||||
|
||||
def test_example(self):
|
||||
"""run installed examples"""
|
||||
for example in ["ex1", "ex2"]:
|
||||
with test_part(
|
||||
self,
|
||||
f"test_example_{example}",
|
||||
purpose=f"run installed {example}",
|
||||
):
|
||||
exe = which(join_path(self.prefix.bin, example))
|
||||
exe()
|
||||
def test_series(self):
|
||||
"""run setup, perform, and report"""
|
||||
|
||||
In this case, there will be an implicit test part for ``test_example``
|
||||
and separate sub-parts for ``ex1`` and ``ex2``. The second sub-part
|
||||
will be executed regardless of whether the first passes. The test
|
||||
log for a run where the first executable fails and the second passes
|
||||
is illustrated below.
|
||||
with test_part(self, "test_series_setup", purpose="setup operation"):
|
||||
exe = which(self.prefix.bin.setup))
|
||||
exe()
|
||||
|
||||
with test_part(self, "test_series_run", purpose="perform operation"):
|
||||
exe = which(self.prefix.bin.run))
|
||||
exe()
|
||||
|
||||
with test_part(self, "test_series_report", purpose="generate report"):
|
||||
exe = which(self.prefix.bin.report))
|
||||
exe()
|
||||
|
||||
The result is ``test_series`` runs the following executables in order: ``setup``,
``run``, and ``report``. In this case no options are passed to any of the
executables and no outputs from running them are checked. Consequently, the
implementation could be simplified with a for-loop as follows:

.. code-block:: python

   class MyPackage(Package):
       ...

       def test_series(self):
           """execute series setup, run, and report"""

           for exe, reason in [
               ("setup", "setup operation"),
               ("run", "perform operation"),
               ("report", "generate report")
           ]:
               with test_part(self, f"test_series_{exe}", purpose=reason):
                   exe = which(self.prefix.bin.join(exe))
                   exe()

In both cases, since we're using a context manager, each test part in
``test_series`` will execute regardless of the status of the other test
parts.

Now let's look at the output from running the stand-alone tests where
the second test part, ``test_series_run``, fails.

.. code-block:: console

   $ spack test results -l mypackage
   ==> Results for test suite 'mypackage':
   ...
   ==> [2024-03-10-16:03:56.625204] test: test_series: execute series setup, run, and report
   ==> [2024-03-10-16:03:56.625439] test: test_series_setup: setup operation
   ...
   PASSED: MyPackage::test_series_setup
   ==> [2024-03-10-16:03:56.625555] test: test_series_run: perform operation
   ...
   FAILED: MyPackage::test_series_run
   ==> [2024-03-10-16:03:57.003456] test: test_series_report: generate report
   ...
   FAILED: MyPackage::test_series_report
   FAILED: MyPackage::test_series
   ...

.. warning::

   Since test parts depended on the success of previous parts, we see that the
   failure of one results in the failure of subsequent checks and the overall
   result of the test method, ``test_series``, is failure.

   Test results reporting requires that each test method and embedded
   test part for a package have a unique name.

.. tip::

   Stand-alone tests run in an environment that provides access to information
   Spack has on how the software was built, such as build options, dependencies,
   and compilers. Build options and dependencies are accessed with the normal
   spec checks. Examples of checking :ref:`variant settings <variants>` and
   :ref:`spec constraints <testing-specs>` can be found at the provided links.
   Accessing compilers in stand-alone tests that are used by the build requires
   setting a package property as described :ref:`below <test-compilation>`.

   If you want to see more examples from packages using ``test_part``, run
   ``spack pkg grep "test_part(" | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.
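
As an illustration of such spec checks, a test method can condition its
behavior on how the spec was concretized. Below is a minimal sketch; the
``+mpi`` variant and the installed ``example`` program are hypothetical:

.. code-block:: python

   def test_example(self):
       """run the installed example, through the launcher when built with MPI"""
       exe = which(self.prefix.bin.example)
       if self.spec.satisfies("+mpi"):
           # Hypothetical: launch through the MPI dependency's mpirun
           mpirun = which(self.spec["mpi"].prefix.bin.mpirun)
           mpirun("-np", "1", exe.path)
       else:
           exe()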

.. _test-compilation:

"""""""""""""""""""""""""""""""""""""
Building and running test executables
"""""""""""""""""""""""""""""""""""""

.. admonition:: Re-use build-time sources and (small) input data sets when possible.

   We **highly recommend** re-using build-time test sources and pared down
   input files for testing installed software. These files are easier
   to keep synchronized with software capabilities when they reside
   within the software's repository. More information on saving files from
   the installation process can be found at :ref:`cache_extra_test_sources`.

   If that is not possible, you can add test-related files to the package
   repository (see :ref:`cache_custom_files`). It will be important to
   remember to maintain them so they work across listed or supported versions
   of the package.

Packages that build libraries are good examples of cases where you'll want
to build test executables from the installed software before running them.
Doing so requires you to let Spack know it needs to load the package's
compiler configuration. This is accomplished by setting the package's
``test_requires_compiler`` property to ``True``.

.. admonition:: ``test_requires_compiler = True`` is required to build test executables.

   Setting the property to ``True`` ensures access to the compiler through
   canonical environment variables (e.g., ``CC``, ``CXX``, ``FC``, ``F77``).
   It also gives access to build dependencies like ``cmake`` through their
   ``spec objects`` (e.g., ``self.spec["cmake"].prefix.bin.cmake`` for the
   path or ``self.spec["cmake"].command`` for the ``Executable`` instance).

   Be sure to add the property at the top of the package class under other
   properties like the ``homepage``.

The example below, which ignores how ``cxx-example.cpp`` is acquired,
illustrates the basic process of compiling a test executable using the
installed library before running it.

.. code-block:: python

   class MyLibrary(Package):
       ...

       test_requires_compiler = True
       ...

       def test_cxx_example(self):
           """build and run cxx-example"""
           exe = "cxx-example"
           ...
           cxx = which(os.environ["CXX"])
           cxx(
               f"-L{self.prefix.lib}",
               f"-I{self.prefix.include}",
               f"{exe}.cpp",
               "-o", exe
           )
           cxx_example = which(exe)
           cxx_example()

Typically the files used to build and or run test executables are either
cached from the installation (see :ref:`cache_extra_test_sources`) or added
to the package repository (see :ref:`cache_custom_files`). There is nothing
preventing the use of both.

.. _cache_extra_test_sources:

""""""""""""""""""""""""""""""""""""
Saving build- and install-time files
""""""""""""""""""""""""""""""""""""

You can use the ``cache_extra_test_sources`` helper routine to copy
directories and or files from the source build stage directory to the
package's installation directory. Spack will automatically copy these
files for you when it sets up the test stage directory and before it
begins running the tests.

The signature for ``cache_extra_test_sources`` is:
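
.. code-block:: python

   # Minimal form, matching how the helper is invoked in the examples below
   def cache_extra_test_sources(pkg, srcs):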

where each argument has the following meaning:

* ``pkg`` is the package being installed.

* ``srcs`` is a string *or* a list of strings corresponding to the
  paths of subdirectories and or files needed for stand-alone testing.

.. warning::

   Paths provided in the ``srcs`` argument **must be relative** to the
   staged source directory. They will be copied to the equivalent relative
   location under the test stage directory prior to test execution.

Contents of subdirectories and files are copied to a special test cache
subdirectory of the installation prefix. They are automatically copied to
the appropriate relative paths under the test stage directory prior to
executing stand-alone tests.

.. tip::

   *Perform test-related conversions once when copying files.*

   If one or more of the copied files needs to be modified to reference
   the installed software, it is recommended that those changes be made
   to the cached files **once** in the post-``install`` copy method
   **after** the call to ``cache_extra_test_sources``. This will reduce
   the amount of unnecessary work in the test method **and** avoid problems
   running stand-alone tests in shared instances and facility deployments.

   The ``filter_file`` function can be quite useful for such changes
   (see :ref:`file-filtering`).
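
As a sketch of that pattern, the conversion can be folded into the copy
method itself. The ``Makefile`` name, its ``PREFIX`` variable, and the
``install_test_root`` helper for locating the cached files under the
installation prefix are assumptions here:

.. code-block:: python

   @run_after("install")
   def copy_test_files(self):
       cache_extra_test_sources(self, "examples")

       # One-time conversion: point the cached (hypothetical) Makefile at
       # the installed software instead of the build tree.
       makefile = join_path(install_test_root(self), "examples", "Makefile")
       filter_file(r"^PREFIX\s*=.*", f"PREFIX = {self.prefix}", makefile)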

Below is a basic example of a test that relies on files from the installation.
This package method re-uses the contents of the ``examples`` subdirectory,
which is assumed to have all of the files implemented to allow ``make`` to
compile and link ``foo.c`` and ``bar.c`` against the package's installed
library.

.. code-block:: python

   class MyLibPackage(MakefilePackage):
       ...

       @run_after("install")
       def copy_test_files(self):
           cache_extra_test_sources(self, "examples")

       def test_example(self):
           """build and run the examples"""
           examples_dir = self.test_suite.current_test_cache_dir.examples
           with working_dir(examples_dir):
               make = which("make")
               make()

               for program in ["foo", "bar"]:
                   with test_part(
                       self,
                       f"test_example_{program}",
                       purpose=f"ensure {program} runs"
                   ):
                       exe = Executable(program)
                       exe()

In this case, ``copy_test_files`` copies the associated files from the
build stage to the package's test cache directory under the installation
prefix. Running ``spack test run`` for the package results in Spack copying
the directory and its contents to the test stage directory. The
``working_dir`` context manager ensures the commands within it are executed
from the ``examples_dir``. The test builds the software using ``make`` before
running each executable, ``foo`` and ``bar``, as independent test parts.

.. note::

   The key to copying files for stand-alone testing at build time is use
   of the ``run_after`` directive, which ensures the associated files are
   copied **after** the provided build stage (``install``) when the installation
   prefix **and** files are available.

The test method uses the path contained in the package's
``self.test_suite.current_test_cache_dir`` property for the root directory
of the copied files. In this case, that's the ``examples`` subdirectory.

.. admonition:: Library packages should build stand-alone tests

   Library developers will want to build the associated tests
   against their **installed** libraries before running them.

.. note::

   While source and input files are generally recommended, binaries
   **may** also be cached by the build process. Only you, as the package
   writer or maintainer, know whether these files would be appropriate
   for testing the installed software weeks to months later.

.. tip::

   If you want to see more examples from packages that cache build files, run
   ``spack pkg grep cache_extra_test_sources | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.

.. _cache_custom_files:

"""""""""""""""""""
Adding custom files
"""""""""""""""""""

Sometimes it is helpful or necessary to include custom files for building and
or checking the results of tests as part of the package. Examples of the types
of files that might be useful are:

- test source files
- test input files
- expected test outputs

While obtaining such files from the software repository is preferred (see
:ref:`cache_extra_test_sources`), there are circumstances where doing so is not
feasible such as when the software is not being actively maintained. When test
files cannot be obtained from the repository or there is a need to supplement
files that can, Spack supports the inclusion of additional files under the
``test`` subdirectory of the package in the Spack repository.

Spack **automatically copies** the contents of that directory to the
test staging directory prior to running stand-alone tests. Test methods
access those files using the ``self.test_suite.current_test_data_dir``
property as shown below.

The following example assumes a ``custom-example.cpp`` is saved in the
``MyLibrary`` package's ``test`` subdirectory. It also assumes the program
simply needs to be compiled and linked against the installed ``MyLibrary``
software.

.. code-block:: python

   class MyLibrary(Package):
       ...

       test_requires_compiler = True
       ...

       def test_custom_example(self):
           """build and run custom-example"""
           src_dir = self.test_suite.current_test_data_dir
           exe = "custom-example"

           with working_dir(src_dir):
               cc = which(os.environ["CC"])
               cc(
                   f"-L{self.prefix.lib}",
                   f"-I{self.prefix.include}",
                   f"{exe}.cpp",
                   "-o", exe
               )

               custom_example = Executable(exe)
               custom_example()

In this case, ``spack test run`` for the package results in Spack copying
the contents of the ``test`` subdirectory to the test stage directory path
in ``self.test_suite.current_test_data_dir`` before calling
``test_custom_example``. Use of the ``working_dir`` context manager
ensures the commands to build and run the program are performed from
within the appropriate subdirectory of the test stage.

.. _expected_test_output_from_file:

"""""""""""""""""""""""""""""""""""
Reading expected output from a file
"""""""""""""""""""""""""""""""""""

The helper function ``get_escaped_text_output`` is available for packages
to retrieve properly formatted text from a file potentially containing
special characters.

The signature for ``get_escaped_text_output`` is:
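
.. code-block:: python

   # Minimal form, matching how the helper is invoked in the examples below
   def get_escaped_text_output(filename):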

where ``filename`` is the path to the file containing the expected output.

The path provided to ``filename`` for one of the copied custom files
(:ref:`custom file <cache_custom_files>`) is in the path rooted at
``self.test_suite.current_test_data_dir``.

The example below shows how to reference both the custom database
(``packages.db``) and expected output (``dump.out``) files Spack copies
to the test stage:

.. code-block:: python

   def test_example(self):
       """check example table dump"""
       test_data_dir = self.test_suite.current_test_data_dir
       db_filename = test_data_dir.join("packages.db")
       ...
       expected = get_escaped_text_output(test_data_dir.join("dump.out"))
       for exp in expected:
           assert re.search(exp, out), f"Expected '{exp}' in output"

If the files were instead cached from installing the software, the paths to the
two files would be found under the ``self.test_suite.current_test_cache_dir``
directory as shown below:

.. code-block:: python

   def test_example(self):
       """check example table dump"""
       test_cache_dir = self.test_suite.current_test_cache_dir
       db_filename = test_cache_dir.join("packages.db")
       ...
       expected = get_escaped_text_output(test_cache_dir.join("dump.out"))
       ...

Alternatively, if both files had been installed by the software into the
``share/tests`` subdirectory of the installation prefix, the paths to the
two files would be referenced as follows:

.. code-block:: python

   def test_example(self):
       """check example table dump"""
       db_filename = self.prefix.share.tests.join("packages.db")
       ...
       expected = get_escaped_text_output(
           self.prefix.share.tests.join("dump.out")
       )
       ...

.. _check_outputs:

""""""""""""""""""""""""""""""""""""
Comparing expected to actual outputs
""""""""""""""""""""""""""""""""""""

The ``check_outputs`` helper routine is available for packages to ensure
multiple expected outputs from running an executable are contained within
the actual outputs.

The signature for ``check_outputs`` is:
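
.. code-block:: python

   # Minimal form, with parameter names matching the equivalent code below
   def check_outputs(expected, actual):

where ``expected`` is a string or list of strings to search for and ``actual``
is the output from running the executable.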

Invoking the method is the equivalent of:

.. code-block:: python

   errors = []
   for check in expected:
       if not re.search(check, actual):
           errors.append(f"Expected '{check}' in output")
   if errors:
       raise RuntimeError("\n ".join(errors))

.. tip::

   If you want to see more examples from packages that use this helper, run
   ``spack pkg grep check_outputs | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.


.. _accessing-files:

"""""""""""""""""""""""""""""""""""""""""
|
||||
Accessing package- and test-related files
|
||||
Finding package- and test-related files
|
||||
"""""""""""""""""""""""""""""""""""""""""
|
||||
|
||||
You may need to access files from one or more locations when writing
stand-alone tests. This can happen when the software's repository does not
include test source files or includes them but has no way to build the
executables using the installed headers and libraries. In these cases
you may need to reference the files relative to one or more root directory.
The table below lists relevant path properties and provides additional
examples of their use. See :ref:`expected_test_output_from_file` for
examples of accessing files saved from the software repository, package
repository, and installation.

.. list-table:: Directory-to-property mapping
   :header-rows: 1

   * - Root Directory
     - Package Property
     - Example(s)
   * - Staged Custom Package Files
     - ``self.test_suite.current_test_data_dir``
     - ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``

.. _inheriting-tests:

""""""""""""""""""""""""""""
Inheriting stand-alone tests
""""""""""""""""""""""""""""

.. warning::

   Any package that implements a test method with the same name as an
   inherited method will override the inherited method. If that is not the
   goal and you are not explicitly calling and adding functionality to
   the inherited method for the test, then make sure that all test methods
   and embedded test parts have unique test names.
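
For example, a package can explicitly call an inherited method and add
functionality to it. Below is a minimal sketch; the ``test_check`` method,
the parent class, and the installed ``example`` program are hypothetical:

.. code-block:: python

   class MyPackage(AutotoolsPackage):
       ...

       def test_check(self):
           """run the inherited check plus a package-specific part"""
           # Keep the inherited behavior by calling it explicitly
           super().test_check()

           with test_part(self, "test_check_example", purpose="run installed example"):
               exe = which(self.prefix.bin.example)
               exe()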

This is already part of the boilerplate for packages created with
``spack create``.

.. _file-filtering:

^^^^^^^^^^^^^^^^^^^
Filtering functions
^^^^^^^^^^^^^^^^^^^

@@ -253,17 +253,6 @@
can easily happen if it is not updated frequently, this behavior ensures that
spack has a way to know for certain about the status of any concrete spec on
the remote mirror, but can slow down pipeline generation significantly.

The optional ``--output-file`` argument should be an absolute path (including
file name) to the generated pipeline, and if not given, the default is
``./.gitlab-ci.yml``.

@@ -1,13 +1,13 @@
sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.6.0
sphinx-rtd-theme==2.0.0
python-levenshtein==0.25.1
docutils==0.20.1
pygments==2.18.0
urllib3==2.2.2
pytest==8.2.2
isort==5.13.2
black==24.4.2
flake8==7.1.0
mypy==1.10.1

@@ -33,8 +33,23 @@
    pass


esc, bell, lbracket, bslash, newline = r"\x1b", r"\x07", r"\[", r"\\", r"\n"
# ANSI Control Sequence Introducers (CSI) are a well-defined format
# Standard ECMA-48: Control Functions for Character-Imaging I/O Devices, section 5.4
# https://www.ecma-international.org/wp-content/uploads/ECMA-48_5th_edition_june_1991.pdf
csi_pre = f"{esc}{lbracket}"
csi_param, csi_inter, csi_post = r"[0-?]", r"[ -/]", r"[@-~]"
ansi_csi = f"{csi_pre}{csi_param}*{csi_inter}*{csi_post}"
# General ANSI escape sequences have well-defined prefixes,
# but content and suffixes are less reliable.
# Conservatively assume they end with either "<ESC>\" or "<BELL>",
# with no intervening "<ESC>"/"<BELL>" keys or newlines
esc_pre = f"{esc}[@-_]"
esc_content = f"[^{esc}{bell}{newline}]"
esc_post = f"(?:{esc}{bslash}|{bell})"
ansi_esc = f"{esc_pre}{esc_content}*{esc_post}"
# Use this to strip escape sequences
_escape = re.compile(f"{ansi_csi}|{ansi_esc}")

# control characters for enabling/disabling echo
#

@@ -791,7 +791,7 @@ def check_virtual_with_variants(spec, msg):
        return
    error = error_cls(
        f"{pkg_name}: {msg}",
        [f"remove variants from '{spec}' in depends_on directive in {filename}"],
    )
    errors.append(error)

@@ -129,10 +129,10 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
    configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
    for name, path in configuration_paths:
        platform = spack.platforms.host().name
        platform_scope = spack.config.DirectoryConfigScope(
            f"{name}/{platform}", os.path.join(path, platform)
        )
        generic_scope = spack.config.DirectoryConfigScope(name, path)
        config_scopes.extend([generic_scope, platform_scope])
        msg = "[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}"
        tty.debug(msg.format(generic_scope.name, generic_scope.path))

@@ -72,6 +72,7 @@
import spack.store
import spack.subprocess_context
import spack.user_environment
import spack.util.executable
import spack.util.path
import spack.util.pattern
from spack import traverse
@@ -458,10 +459,7 @@ def set_wrapper_variables(pkg, env):

    # Find ccache binary and hand it to build environment
    if spack.config.get("config:ccache"):
        env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True))

    # Gather information about various types of dependencies
    link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))

@@ -740,7 +738,9 @@ def get_rpaths(pkg):
    # Second module is our compiler mod name. We use that to get rpaths from
    # module show output.
    if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
        mod_rpath = path_from_modules([pkg.compiler.modules[1]])
        if mod_rpath:
            rpaths.append(mod_rpath)
    return list(dedupe(filter_system_paths(rpaths)))

@@ -828,7 +828,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
    return env_base


class EnvironmentVisitor:
    def __init__(self, *roots: spack.spec.Spec, context: Context):
        # For the roots (well, marked specs) we follow different edges
        # than for their deps, depending on the context.
@@ -846,7 +846,7 @@ def __init__(self, *roots: spack.spec.Spec, context: Context):
        self.root_depflag = dt.RUN | dt.LINK

    def neighbors(self, item):
        spec = item.edge.spec
        if spec.dag_hash() in self.root_hashes:
            depflag = self.root_depflag
        else:

@@ -162,7 +162,9 @@ def initconfig_compiler_entries(self):
        ld_flags = " ".join(flags["ldflags"])
        ld_format_string = "CMAKE_{0}_LINKER_FLAGS"
        # CMake has separate linker arguments for types of builds.
        # 'ldflags' should not be used with CMAKE_STATIC_LINKER_FLAGS which
        # is used by the archiver, so don't include "STATIC" in this loop:
        for ld_type in ["EXE", "MODULE", "SHARED"]:
            ld_string = ld_format_string.format(ld_type)
            entries.append(cmake_cache_string(ld_string, ld_flags))

@@ -136,14 +136,14 @@ def cuda_flags(arch_list):
    conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
    conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
    conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
    conflicts("%gcc@14:", when="+cuda ^cuda@:12.5")
    conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
    conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
    conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
    conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
    conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
    conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
    conflicts("%clang@18:", when="+cuda ^cuda@:12.5")

    # https://gist.github.com/ax3l/9489132#gistcomment-3860114
    conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")

@@ -34,6 +34,8 @@ def _misc_cache():
    return spack.util.file_cache.FileCache(path)


FileCacheType = Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton]

#: Spack's cache for small data
MISC_CACHE: Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton] = (
    llnl.util.lang.Singleton(_misc_cache)

@@ -22,6 +22,8 @@
from urllib.parse import urlencode
from urllib.request import HTTPHandler, Request, build_opener

import ruamel.yaml

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import memoized
@@ -551,10 +553,9 @@ def generate_gitlab_ci_yaml(
    env,
    print_summary,
    output_file,
    *,
    prune_dag=False,
    check_index_only=False,
    artifacts_root=None,
    remote_mirror_override=None,
):
@@ -575,12 +576,6 @@
        this mode results in faster yaml generation time). Otherwise, also
        check each spec directly by url (useful if there is no index or it
        might be out of date).
    artifacts_root (str): Path where artifacts like logs, environment
        files (spack.yaml, spack.lock), etc should be written. GitLab
        requires this to be within the project directory.
@@ -814,7 +809,8 @@ def ensure_expected_target_path(path):
    cli_scopes = [
        os.path.relpath(s.path, concrete_env_dir)
        for s in cfg.scopes().values()
        if not s.writable
        and isinstance(s, (cfg.DirectoryConfigScope))
        and s.path not in env_includes
        and os.path.exists(s.path)
    ]

@@ -1271,17 +1267,6 @@ def main_script_replacements(cmd):
        with open(copy_specs_file, "w") as fd:
            fd.write(json.dumps(buildcache_copies))

    else:
        # No jobs were generated
        noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
@@ -1310,8 +1295,11 @@ def main_script_replacements(cmd):
    if not rebuild_everything:
        sys.exit(1)

    # Minimize yaml output size through use of anchors
    syaml.anchorify(sorted_output)

    with open(output_file, "w") as f:
        ruamel.yaml.YAML().dump(sorted_output, f)


def _url_encode_string(input_string):

@@ -1,34 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections.abc

get_job_name = lambda needs_entry: (
    needs_entry.get("job")
    if (isinstance(needs_entry, collections.abc.Mapping) and needs_entry.get("artifacts", True))
    else needs_entry if isinstance(needs_entry, str) else None
)


def convert_job(job_entry):
    if not isinstance(job_entry, collections.abc.Mapping):
        return job_entry

    needs = job_entry.get("needs")
    if needs is None:
        return job_entry

    new_job = {}
    new_job.update(job_entry)
    del new_job["needs"]

    new_job["dependencies"] = list(
        filter((lambda x: x is not None), (get_job_name(needs_entry) for needs_entry in needs))
    )

    return new_job


def needs_to_dependencies(yaml):
    return dict((k, convert_job(v)) for k, v in yaml.items())

@@ -1,363 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import collections.abc
import copy
import hashlib

import spack.util.spack_yaml as syaml


def sort_yaml_obj(obj):
    if isinstance(obj, collections.abc.Mapping):
        return syaml.syaml_dict(
            (k, sort_yaml_obj(v)) for k, v in sorted(obj.items(), key=(lambda item: str(item[0])))
        )

    if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
        return syaml.syaml_list(sort_yaml_obj(x) for x in obj)

    return obj


def matches(obj, proto):
    """Returns True if the test object "obj" matches the prototype object
    "proto".

    If obj and proto are mappings, obj matches proto if (key in obj) and
    (obj[key] matches proto[key]) for every key in proto.

    If obj and proto are sequences, obj matches proto if they are of the same
    length and (a matches b) for every (a,b) in zip(obj, proto).

    Otherwise, obj matches proto if obj == proto.

    Precondition: proto must not have any reference cycles
    """
    if isinstance(obj, collections.abc.Mapping):
        if not isinstance(proto, collections.abc.Mapping):
            return False

        return all((key in obj and matches(obj[key], val)) for key, val in proto.items())

    if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
        if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)):
            return False

        if len(obj) != len(proto):
            return False

        return all(matches(obj[index], val) for index, val in enumerate(proto))

    return obj == proto


def subkeys(obj, proto):
    """Returns the test mapping "obj" after factoring out the items it has in
    common with the prototype mapping "proto".

    Consider a recursive merge operation, merge(a, b) on mappings a and b, that
    returns a mapping, m, whose keys are the union of the keys of a and b, and
    for every such key, "k", its corresponding value is:

      - merge(a[key], b[key])  if a[key] and b[key] are mappings, or
      - b[key]  if (key in b) and not matches(a[key], b[key]), or
      - a[key]  otherwise

    If obj and proto are mappings, the returned object is the smallest object,
    "a", such that merge(a, proto) matches obj.

    Otherwise, obj is returned.
    """
    if not (
        isinstance(obj, collections.abc.Mapping) and isinstance(proto, collections.abc.Mapping)
    ):
        return obj

    new_obj = {}
    for key, value in obj.items():
        if key not in proto:
            new_obj[key] = value
            continue

        if matches(value, proto[key]) and matches(proto[key], value):
            continue

        if isinstance(value, collections.abc.Mapping):
            new_obj[key] = subkeys(value, proto[key])
            continue

        new_obj[key] = value

    return new_obj


def add_extends(yaml, key):
    """Modifies the given object "yaml" so that it includes an "extends" key
    whose value features "key".

    If "extends" is not in yaml, then yaml is modified such that
    yaml["extends"] == key.

    If yaml["extends"] is a str, then yaml is modified such that
    yaml["extends"] == [yaml["extends"], key]

    If yaml["extends"] is a list that does not include key, then key is
    appended to the list.

    Otherwise, yaml is left unchanged.
    """

    has_key = "extends" in yaml
    extends = yaml.get("extends")

    if has_key and not isinstance(extends, (str, collections.abc.Sequence)):
        return

    if extends is None:
        yaml["extends"] = key
        return

    if isinstance(extends, str):
        if extends != key:
            yaml["extends"] = [extends, key]
        return

    if key not in extends:
        extends.append(key)


def common_subobject(yaml, sub):
    """Factor prototype object "sub" out of the values of mapping "yaml".

    Consider a modified copy of yaml, "new", where for each key, "key" in yaml:

      - If yaml[key] matches sub, then new[key] = subkeys(yaml[key], sub).
      - Otherwise, new[key] = yaml[key].

    If the above match criterion is not satisfied for any such key, then (yaml,
    None) is returned. The yaml object is returned unchanged.

    Otherwise, each matching value in new is modified as in
    add_extends(new[key], common_key), and then new[common_key] is set to sub.
    The common_key value is chosen such that it does not match any preexisting
    key in new. In this case, (new, common_key) is returned.
    """
    match_list = set(k for k, v in yaml.items() if matches(v, sub))

    if not match_list:
        return yaml, None

    common_prefix = ".c"
    common_index = 0

    while True:
        common_key = "".join((common_prefix, str(common_index)))
        if common_key not in yaml:
            break
        common_index += 1

    new_yaml = {}

    for key, val in yaml.items():
        new_yaml[key] = copy.deepcopy(val)

        if not matches(val, sub):
            continue

        new_yaml[key] = subkeys(new_yaml[key], sub)
        add_extends(new_yaml[key], common_key)

    new_yaml[common_key] = sub

    return new_yaml, common_key


def print_delta(name, old, new, applied=None):
    delta = new - old
    reldelta = (1000 * delta) // old
    reldelta = (reldelta // 10, reldelta % 10)

    if applied is None:
        applied = new <= old

    print(
        "\n".join(
            (
                "{0} {1}:",
                "  before: {2: 10d}",
                "  after : {3: 10d}",
                "  delta : {4:+10d} ({5:=+3d}.{6}%)",
            )
        ).format(name, ("+" if applied else "x"), old, new, delta, reldelta[0], reldelta[1])
    )


def try_optimization_pass(name, yaml, optimization_pass, *args, **kwargs):
    """Try applying an optimization pass and return information about the
    result

    "name" is a string describing the nature of the pass. If it is a non-empty
    string, summary statistics are also printed to stdout.

    "yaml" is the object to apply the pass to.

    "optimization_pass" is the function implementing the pass to be applied.

    "args" and "kwargs" are the additional arguments to pass to optimization
    pass. The pass is applied as

    >>> (new_yaml, *other_results) = optimization_pass(yaml, *args, **kwargs)

    The pass's results are greedily rejected if it does not modify the original
    yaml document, or if it produces a yaml document that serializes to a
    larger string.

    Returns (new_yaml, yaml, applied, other_results) if applied, or
    (yaml, new_yaml, applied, other_results) otherwise.
    """
    result = optimization_pass(yaml, *args, **kwargs)
    new_yaml, other_results = result[0], result[1:]

    if new_yaml is yaml:
        # pass was not applied
        return (yaml, new_yaml, False, other_results)

    pre_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
    post_size = len(syaml.dump_config(sort_yaml_obj(new_yaml), default_flow_style=True))

    # pass makes the size worse: not applying
    applied = post_size <= pre_size
    if applied:
        yaml, new_yaml = new_yaml, yaml

    if name:
        print_delta(name, pre_size, post_size, applied)

    return (yaml, new_yaml, applied, other_results)


def build_histogram(iterator, key):
    """Builds a histogram of values given an iterable of mappings and a key.

    For each mapping "m" with key "key" in iterator, the value m[key] is
    considered.

    Returns a list of tuples (hash, count, proportion, value), where

      - "hash" is a sha1sum hash of the value.
      - "count" is the number of occurrences of values that hash to "hash".
      - "proportion" is the proportion of all values considered above that
        hash to "hash".
      - "value" is one of the values considered above that hash to "hash".
        Which value is chosen when multiple values hash to the same "hash" is
        undefined.

    The list is sorted in descending order by count, yielding the most
    frequently occurring hashes first.
    """
    buckets = collections.defaultdict(int)
    values = {}

    num_objects = 0
    for obj in iterator:
        num_objects += 1

        try:
            val = obj[key]
        except (KeyError, TypeError):
            continue

        value_hash = hashlib.sha1()
        value_hash.update(syaml.dump_config(sort_yaml_obj(val)).encode())
        value_hash = value_hash.hexdigest()

        buckets[value_hash] += 1
        values[value_hash] = val

    return [
        (h, buckets[h], float(buckets[h]) / num_objects, values[h])
        for h in sorted(buckets.keys(), key=lambda k: -buckets[k])
    ]


def optimizer(yaml):
    original_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))

    # try factoring out commonly repeated portions
    common_job = {
        "variables": {"SPACK_COMPILER_ACTION": "NONE"},
        "after_script": ['rm -rf "./spack"'],
        "artifacts": {"paths": ["jobs_scratch_dir", "cdash_report"], "when": "always"},
    }

    # look for a list of tags that appear frequently
    _, count, proportion, tags = next(iter(build_histogram(yaml.values(), "tags")), (None,) * 4)

    # If a list of tags is found, and there are more than one job that uses it,
    # *and* the jobs that do use it represent at least 70% of all jobs, then
    # add the list to the prototype object.
    if tags and count > 1 and proportion >= 0.70:
        common_job["tags"] = tags

    # apply common object factorization
    yaml, other, applied, rest = try_optimization_pass(
        "general common object factorization", yaml, common_subobject, common_job
    )

    # look for a common script, and try factoring that out
    _, count, proportion, script = next(
        iter(build_histogram(yaml.values(), "script")), (None,) * 4
    )

    if script and count > 1 and proportion >= 0.70:
        yaml, other, applied, rest = try_optimization_pass(
            "script factorization", yaml, common_subobject, {"script": script}
        )

    # look for a common before_script, and try factoring that out
    _, count, proportion, script = next(
        iter(build_histogram(yaml.values(), "before_script")), (None,) * 4
    )

    if script and count > 1 and proportion >= 0.70:
        yaml, other, applied, rest = try_optimization_pass(
            "before_script factorization", yaml, common_subobject, {"before_script": script}
        )

    # Look specifically for the SPACK_ROOT_SPEC environment variables.
    # Try to factor them out.
    h = build_histogram(
        (getattr(val, "get", lambda *args: {})("variables") for val in yaml.values()),
        "SPACK_ROOT_SPEC",
    )

    # In this case, we try to factor out *all* instances of the SPACK_ROOT_SPEC
    # environment variable; not just the one that appears with the greatest
    # frequency. We only require that more than 1 job uses a given instance's
    # value, because we expect the value to be very large, and so expect even
    # few-to-one factorizations to yield large space savings.
    counter = 0
    for _, count, proportion, spec in h:
        if count <= 1:
            continue

        counter += 1

        yaml, other, applied, rest = try_optimization_pass(
            "SPACK_ROOT_SPEC factorization ({count})".format(count=counter),
            yaml,
            common_subobject,
            {"variables": {"SPACK_ROOT_SPEC": spec}},
        )

    new_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))

    print("\n")
    print_delta("overall summary", original_size, new_size)
    print("\n")
    return yaml

@@ -444,7 +444,7 @@ def format_list(specs):
def filter_loaded_specs(specs):
    """Filter a list of specs returning only those that are
    currently loaded."""
    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(os.pathsep)
    return [x for x in specs if x.dag_hash() in hashes]

@@ -165,7 +165,7 @@ def _reset(args):
    if not ok_to_continue:
        raise RuntimeError("Aborting")

    for scope in spack.config.CONFIG.writable_scopes:
        # The default scope should stay untouched
        if scope.name == "defaults":
            continue

@@ -6,6 +6,7 @@
import json
import os
import shutil
import warnings
from urllib.parse import urlparse, urlunparse

import llnl.util.filesystem as fs
@@ -73,7 +74,7 @@ def setup_parser(subparser):
    "--optimize",
    action="store_true",
    default=False,
    help="(DEPRECATED) optimize the gitlab yaml file for size\n\n"
    "run the generated document through a series of optimization passes "
    "designed to reduce the size of the generated file",
)
@@ -81,7 +82,7 @@ def setup_parser(subparser):
    "--dependencies",
    action="store_true",
    default=False,
    help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)",
)
generate.add_argument(
    "--buildcache-destination",
@@ -200,6 +201,18 @@ def ci_generate(args):
    before invoking this command. the value must be the CDash authorization token needed to create
    a build group and register all generated jobs under it
    """
    if args.optimize:
        warnings.warn(
            "The --optimize option has been deprecated, and currently has no effect. "
            "It will be removed in Spack v0.24."
        )

    if args.dependencies:
        warnings.warn(
            "The --dependencies option has been deprecated, and currently has no effect. "
            "It will be removed in Spack v0.24."
        )

    env = spack.cmd.require_active_env(cmd_name="ci generate")

    if args.copy_to:
@@ -212,8 +225,6 @@ def ci_generate(args):

    output_file = args.output_file
    copy_yaml_to = args.copy_to
    prune_dag = args.prune_dag
    index_only = args.index_only
    artifacts_root = args.artifacts_root
@@ -234,8 +245,6 @@ def ci_generate(args):
        output_file,
        prune_dag=prune_dag,
        check_index_only=index_only,
        artifacts_root=artifacts_root,
        remote_mirror_override=buildcache_destination,
    )

@@ -41,7 +41,7 @@ def setup_parser(subparser):
    )


class AreDepsInstalledVisitor:
    def __init__(self, context: Context = Context.BUILD):
        if context == Context.BUILD:
            # TODO: run deps shouldn't be required for build env.
@@ -53,27 +53,27 @@ def __init__(self, context: Context = Context.BUILD):

        self.has_uninstalled_deps = False

    def accept(self, item):
        # The root may be installed or uninstalled.
        if item.depth == 0:
            return True

        # Early exit after we've seen an uninstalled dep.
        if self.has_uninstalled_deps:
            return False

        spec = item.edge.spec
        if not spec.external and not spec.installed:
            self.has_uninstalled_deps = True
            return False

        return True

    def neighbors(self, item):
        # Direct deps: follow build & test edges.
        # Transitive deps: follow link / run.
        depflag = self.direct_deps if item.depth == 0 else dt.LINK | dt.RUN
        return item.edge.spec.edges_to_dependencies(depflag=depflag)


def emulate_env_utility(cmd_name, context: Context, args):

@@ -3,6 +3,9 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import llnl.util.tty as tty
from llnl.string import plural

import spack.cmd
import spack.cmd.common.arguments
import spack.environment as ev
@@ -43,5 +46,9 @@ def concretize(parser, args):
    with env.write_transaction():
        concretized_specs = env.concretize(force=args.force, tests=tests)
        if not args.quiet:
            if concretized_specs:
                tty.msg(f"Concretized {plural(len(concretized_specs), 'spec')}:")
                ev.display_specs([concrete for _, concrete in concretized_specs])
            else:
                tty.msg("No new specs to concretize.")
        env.write()

@@ -264,7 +264,9 @@ def config_remove(args):
def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):
    if isinstance(scope, spack.config.SingleFileScope):
        return fs.can_access(cfg_file)
    elif isinstance(scope, spack.config.DirectoryConfigScope):
        return fs.can_write_to_dir(scope.path) and fs.can_access(cfg_file)
    return False


def _config_change_requires_scope(path, spec, scope, match_spec=None):
@@ -362,14 +364,11 @@ def config_change(args):
def config_update(args):
    # Read the configuration files
    spack.config.CONFIG.get_config(args.section, scope=args.scope)

    updates: List[spack.config.ConfigScope] = [
        x
        for x in spack.config.CONFIG.format_updates[args.section]
        if not isinstance(x, spack.config.InternalConfigScope) and x.writable
    ]

    cannot_overwrite, skip_system_scope = [], False
    for scope in updates:
@@ -447,7 +446,7 @@ def _can_revert_update(scope_dir, cfg_file, bkp_file):


def config_revert(args):
    scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.writable_scopes]

    # Search for backup files in the configuration scopes
    Entry = collections.namedtuple("Entry", ["scope", "cfg", "bkp"])

@@ -2,7 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import re
import sys
@@ -934,7 +933,7 @@ def get_repository(args, name):
    # Figure out where the new package should live
    repo_path = args.repo
    if repo_path is not None:
        repo = spack.repo.from_path(repo_path)
        if spec.namespace and spec.namespace != repo.namespace:
            tty.die(
                "Can't create package with namespace {0} in repo with "

@@ -9,6 +9,8 @@
import spack.cmd
import spack.config
import spack.fetch_strategy
import spack.repo
import spack.spec
import spack.util.path
import spack.version
@@ -69,13 +71,15 @@ def _retrieve_develop_source(spec, abspath):
    # We construct a package class ourselves, rather than asking for
    # Spec.package, since Spec only allows this when it is concrete
    package = pkg_cls(spec)
    source_stage = package.stage[0]
    if isinstance(source_stage.fetcher, spack.fetch_strategy.GitFetchStrategy):
        source_stage.fetcher.get_full_repo = True
        # If we retrieved this version before and cached it, we may have
        # done so without cloning the full git repo; likewise, any
        # mirror might store an instance with truncated history.
        source_stage.disable_mirrors()

    source_stage.fetcher.set_package(package)
    package.stage.steal_source(abspath)

@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import errno
 import glob
 import os
@@ -11,43 +12,13 @@
 import spack.cmd
 import spack.paths
 import spack.repo
-from spack.spec import Spec
-from spack.util.editor import editor
+import spack.util.editor

 description = "open package files in $EDITOR"
 section = "packaging"
 level = "short"


-def edit_package(name, repo_path, namespace):
-    """Opens the requested package file in your favorite $EDITOR.
-
-    Args:
-        name (str): The name of the package
-        repo_path (str): The path to the repository containing this package
-        namespace (str): A valid namespace registered with Spack
-    """
-    # Find the location of the package
-    if repo_path:
-        repo = spack.repo.Repo(repo_path)
-    elif namespace:
-        repo = spack.repo.PATH.get_repo(namespace)
-    else:
-        repo = spack.repo.PATH
-    path = repo.filename_for_package_name(name)
-
-    spec = Spec(name)
-    if os.path.exists(path):
-        if not os.path.isfile(path):
-            tty.die("Something is wrong. '{0}' is not a file!".format(path))
-        if not os.access(path, os.R_OK):
-            tty.die("Insufficient permissions on '%s'!" % path)
-    else:
-        raise spack.repo.UnknownPackageError(spec.name)
-
-    editor(path)


 def setup_parser(subparser):
     excl_args = subparser.add_mutually_exclusive_group()
@@ -98,41 +69,67 @@ def setup_parser(subparser):
     excl_args.add_argument("-r", "--repo", default=None, help="path to repo to edit package in")
     excl_args.add_argument("-N", "--namespace", default=None, help="namespace of package to edit")

-    subparser.add_argument("package", nargs="?", default=None, help="package name")
+    subparser.add_argument("package", nargs="*", default=None, help="package name")


+def locate_package(name: str, repo: spack.repo.Repo) -> str:
+    path = repo.filename_for_package_name(name)
+
+    try:
+        with open(path, "r"):
+            return path
+    except OSError as e:
+        if e.errno == errno.ENOENT:
+            raise spack.repo.UnknownPackageError(name) from e
+        tty.die(f"Cannot edit package: {e}")
+
+
+def locate_file(name: str, path: str) -> str:
+    # convert command names to python module name
+    if path == spack.paths.command_path:
+        name = spack.cmd.python_name(name)
+
+    file_path = os.path.join(path, name)
+
+    # Try to open direct match.
+    try:
+        with open(file_path, "r"):
+            return file_path
+    except OSError as e:
+        if e.errno != errno.ENOENT:
+            tty.die(f"Cannot edit file: {e}")
+        pass
+
+    # Otherwise try to find a file that starts with the name
+    candidates = glob.glob(file_path + "*")
+    exclude_list = [".pyc", "~"]  # exclude binaries and backups
+    files = [f for f in candidates if not any(f.endswith(ext) for ext in exclude_list)]
+    if len(files) > 1:
+        tty.die(
+            f"Multiple files start with `{name}`:\n"
+            + "\n".join(f"        {os.path.basename(f)}" for f in files)
+        )
+    elif not files:
+        tty.die(f"No file for '{name}' was found in {path}")
+    return files[0]


 def edit(parser, args):
-    name = args.package
-
-    # By default, edit package files
-    path = spack.paths.packages_path
+    names = args.package

     # If `--command`, `--test`, or `--module` is chosen, edit those instead
     if args.path:
-        path = args.path
-        if name:
-            # convert command names to python module name
-            if path == spack.paths.command_path:
-                name = spack.cmd.python_name(name)
-
-            path = os.path.join(path, name)
-            if not os.path.exists(path):
-                files = glob.glob(path + "*")
-                exclude_list = [".pyc", "~"]  # exclude binaries and backups
-                files = list(filter(lambda x: all(s not in x for s in exclude_list), files))
-                if len(files) > 1:
-                    m = "Multiple files exist with the name {0}.".format(name)
-                    m += " Please specify a suffix. Files are:\n\n"
-                    for f in files:
-                        m += "        " + os.path.basename(f) + "\n"
-                    tty.die(m)
-                if not files:
-                    tty.die("No file for '{0}' was found in {1}".format(name, path))
-                path = files[0]  # already confirmed only one entry in files
-
-        editor(path)
-    elif name:
-        edit_package(name, args.repo, args.namespace)
+        paths = [locate_file(name, args.path) for name in names] if names else [args.path]
+        spack.util.editor.editor(*paths)
+    elif names:
+        if args.repo:
+            repo = spack.repo.from_path(args.repo)
+        elif args.namespace:
+            repo = spack.repo.PATH.get_repo(args.namespace)
+        else:
+            repo = spack.repo.PATH
+        paths = [locate_package(name, repo) for name in names]
+        spack.util.editor.editor(*paths)
     else:
         # By default open the directory where packages live
-        editor(path)
+        spack.util.editor.editor(spack.paths.packages_path)
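The new `locate_file` helper uses an EAFP pattern: try to open the exact path, and only fall back to globbing when the error is specifically `ENOENT`. A minimal, Spack-free sketch of that pattern:

import errno
import glob


def locate(path: str) -> str:
    """Return path if it opens cleanly; otherwise fall back to a prefix glob."""
    try:
        with open(path, "r"):
            return path
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise  # permission errors and the like are real failures
    # Only "file not found" reaches the fallback.
    matches = glob.glob(path + "*")
    if len(matches) != 1:
        raise FileNotFoundError(f"no unique match for {path!r}")
    return matches[0]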
@@ -56,7 +56,6 @@ def roots_from_environments(args, active_env):

     # -e says "also preserve things needed by this particular env"
     for env_name_or_dir in args.except_environment:
-        print("HMM", env_name_or_dir)
         if ev.exists(env_name_or_dir):
             env = ev.read(env_name_or_dir)
         elif ev.is_env_dir(env_name_or_dir):
@@ -10,6 +10,7 @@
 from typing import List

 import llnl.util.filesystem as fs
+from llnl.string import plural
 from llnl.util import lang, tty

 import spack.build_environment
@@ -375,7 +376,9 @@ def _maybe_add_and_concretize(args, env, specs):
     # `spack concretize`
     tests = compute_tests_install_kwargs(env.user_specs, args.test)
     concretized_specs = env.concretize(tests=tests)
-    ev.display_specs(concretized_specs)
+    if concretized_specs:
+        tty.msg(f"Concretized {plural(len(concretized_specs), 'spec')}")
+        ev.display_specs([concrete for _, concrete in concretized_specs])

     # save view regeneration for later, so that we only do it
     # once, as it can be slow.
@@ -91,7 +91,7 @@ def repo_add(args):
         tty.die("Not a Spack repository: %s" % path)

     # Make sure it's actually a spack repository by constructing it.
-    repo = spack.repo.Repo(canon_path)
+    repo = spack.repo.from_path(canon_path)

     # If that succeeds, finally add it to the configuration.
     repos = spack.config.get("repos", scope=args.scope)
@@ -124,7 +124,7 @@ def repo_remove(args):
     # If it is a namespace, remove corresponding repo
     for path in repos:
         try:
-            repo = spack.repo.Repo(path)
+            repo = spack.repo.from_path(path)
             if repo.namespace == namespace_or_path:
                 repos.remove(path)
                 spack.config.set("repos", repos, args.scope)
@@ -142,7 +142,7 @@ def repo_list(args):
     repos = []
     for r in roots:
         try:
-            repos.append(spack.repo.Repo(r))
+            repos.append(spack.repo.from_path(r))
         except spack.repo.RepoError:
             continue
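Direct `Repo(path)` construction is replaced throughout this compare by a `spack.repo.from_path(path)` factory, so call sites no longer pass constructor keywords (such as the cache) themselves. A sketch of the general factory-function pattern, with hypothetical names:

# Hypothetical sketch: centralize the keyword arguments that every
# call site used to repeat.
_DEFAULT_CACHE = object()  # stand-in for a shared cache singleton


class Repo:
    def __init__(self, root, *, cache):
        self.root = root
        self.cache = cache


def from_path(root):
    """Build a Repo with the process-wide defaults filled in."""
    return Repo(root, cache=_DEFAULT_CACHE)


repo = from_path("/some/repo")  # call sites stay one argument long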
@@ -114,15 +114,16 @@ def _process_result(result, show, required_format, kwargs):

     # dump the solutions as concretized specs
     if "solutions" in show:
-        for spec in result.specs:
-            # With -y, just print YAML to output.
-            if required_format == "yaml":
-                # use write because to_yaml already has a newline.
-                sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
-            elif required_format == "json":
-                sys.stdout.write(spec.to_json(hash=ht.dag_hash))
-            else:
-                sys.stdout.write(spec.tree(color=sys.stdout.isatty(), **kwargs))
+        if required_format:
+            for spec in result.specs:
+                # With -y, just print YAML to output.
+                if required_format == "yaml":
+                    # use write because to_yaml already has a newline.
+                    sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
+                elif required_format == "json":
+                    sys.stdout.write(spec.to_json(hash=ht.dag_hash))
+        else:
+            sys.stdout.write(spack.spec.tree(result.specs, color=sys.stdout.isatty(), **kwargs))
         print()

     if result.unsolved_specs and "solutions" in show:
@@ -105,11 +105,19 @@ def spec(parser, args):
     if env:
         env.concretize()
         specs = env.concretized_specs()
+
+        # environments are printed together in a combined tree() invocation,
+        # except when using --yaml or --json, which we print spec by spec below.
+        if not args.format:
+            tree_kwargs["key"] = spack.traverse.by_dag_hash
+            tree_kwargs["hashes"] = args.long or args.very_long
+            print(spack.spec.tree([concrete for _, concrete in specs], **tree_kwargs))
+            return
     else:
         tty.die("spack spec requires at least one spec or an active environment")

     for input, output in specs:
-        # With -y, just print YAML to output.
+        # With --yaml or --json, just print the raw specs to output
         if args.format:
             if args.format == "yaml":
                 # use write because to_yaml already has a newline.
@@ -71,7 +71,7 @@ def unload(parser, args):
             "Cannot specify specs on command line when unloading all specs with '--all'"
         )

-    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")
+    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(os.pathsep)
     if args.specs:
         specs = [
             spack.cmd.disambiguate_spec_from_hashes(spec, hashes)
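Splitting on `os.pathsep` instead of a hard-coded `":"` makes the loaded-hashes variable parse correctly on Windows, where the separator for PATH-like variables is `";"`. A quick illustration:

import os

# os.pathsep is the separator used in PATH-like environment variables:
# ":" on POSIX systems and ";" on Windows.
loaded = os.pathsep.join(["abc123", "def456"])
hashes = loaded.split(os.pathsep)
assert hashes == ["abc123", "def456"]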
@@ -38,10 +38,10 @@

 import spack.cmd
 import spack.environment as ev
+import spack.filesystem_view as fsv
 import spack.schema.projections
 import spack.store
 from spack.config import validate
-from spack.filesystem_view import YamlFilesystemView, view_func_parser
 from spack.util import spack_yaml as s_yaml

 description = "project packages to a compact naming scheme on the filesystem"
@@ -193,17 +193,13 @@ def view(parser, args):
     ordered_projections = {}

-    # What method are we using for this view
-    if args.action in actions_link:
-        link_fn = view_func_parser(args.action)
-    else:
-        link_fn = view_func_parser("symlink")
-
-    view = YamlFilesystemView(
+    link_type = args.action if args.action in actions_link else "symlink"
+    view = fsv.YamlFilesystemView(
         path,
         spack.store.STORE.layout,
         projections=ordered_projections,
         ignore_conflicts=getattr(args, "ignore_conflicts", False),
-        link=link_fn,
+        link_type=link_type,
         verbose=args.verbose,
     )
@@ -260,7 +260,7 @@ def _init_compiler_config(
 def compiler_config_files():
     config_files = list()
     config = spack.config.CONFIG
-    for scope in config.file_scopes:
+    for scope in config.writable_scopes:
         name = scope.name
         compiler_config = config.get("compilers", scope=name)
         if compiler_config:
@@ -35,7 +35,7 @@
 import os
 import re
 import sys
-from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Type, Union
+from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union

 from llnl.util import filesystem, lang, tty

@@ -117,21 +117,39 @@

 class ConfigScope:
-    """This class represents a configuration scope.
-
-    A scope is one directory containing named configuration files.
-    Each file is a config "section" (e.g., mirrors, compilers, etc.).
-    """
-
-    def __init__(self, name, path) -> None:
-        self.name = name  # scope name.
-        self.path = path  # path to directory containing configs.
-        self.sections = syaml.syaml_dict()  # sections read from config files.
+    def __init__(self, name: str) -> None:
+        self.name = name
+        self.writable = False
+        self.sections = syaml.syaml_dict()
+
+    def get_section_filename(self, section: str) -> str:
+        raise NotImplementedError
+
+    def get_section(self, section: str) -> Optional[YamlConfigDict]:
+        raise NotImplementedError
+
+    def _write_section(self, section: str) -> None:
+        raise NotImplementedError

     @property
     def is_platform_dependent(self) -> bool:
-        """Returns true if the scope name is platform specific"""
-        return os.sep in self.name
+        return False
+
+    def clear(self) -> None:
+        """Empty cached config information."""
+        self.sections = syaml.syaml_dict()
+
+    def __repr__(self) -> str:
+        return f"<ConfigScope: {self.name}>"
+
+
+class DirectoryConfigScope(ConfigScope):
+    """Config scope backed by a directory containing one file per section."""
+
+    def __init__(self, name: str, path: str, *, writable: bool = True) -> None:
+        super().__init__(name)
+        self.path = path
+        self.writable = writable

     def get_section_filename(self, section: str) -> str:
         """Returns the filename associated with a given section"""
@@ -148,6 +166,9 @@ def get_section(self, section: str) -> Optional[YamlConfigDict]:
         return self.sections[section]

     def _write_section(self, section: str) -> None:
+        if not self.writable:
+            raise ConfigError(f"Cannot write to immutable scope {self}")
+
         filename = self.get_section_filename(section)
         data = self.get_section(section)
         if data is None:
@@ -164,19 +185,23 @@ def _write_section(self, section: str) -> None:
         except (syaml.SpackYAMLError, OSError) as e:
             raise ConfigFileError(f"cannot write to '{filename}'") from e

-    def clear(self) -> None:
-        """Empty cached config information."""
-        self.sections = syaml.syaml_dict()
-
-    def __repr__(self) -> str:
-        return f"<ConfigScope: {self.name}: {self.path}>"
+    @property
+    def is_platform_dependent(self) -> bool:
+        """Returns true if the scope name is platform specific"""
+        return "/" in self.name


 class SingleFileScope(ConfigScope):
     """This class represents a configuration scope in a single YAML file."""

     def __init__(
-        self, name: str, path: str, schema: YamlConfigDict, yaml_path: Optional[List[str]] = None
+        self,
+        name: str,
+        path: str,
+        schema: YamlConfigDict,
+        *,
+        yaml_path: Optional[List[str]] = None,
+        writable: bool = True,
     ) -> None:
         """Similar to ``ConfigScope`` but can be embedded in another schema.
@@ -195,15 +220,13 @@ def __init__(
                   config:
                     install_tree: $spack/opt/spack
         """
-        super().__init__(name, path)
+        super().__init__(name)
         self._raw_data: Optional[YamlConfigDict] = None
         self.schema = schema
+        self.path = path
+        self.writable = writable
         self.yaml_path = yaml_path or []

-    @property
-    def is_platform_dependent(self) -> bool:
-        return False
-
     def get_section_filename(self, section) -> str:
         return self.path

@@ -257,6 +280,8 @@ def get_section(self, section: str) -> Optional[YamlConfigDict]:
         return self.sections.get(section, None)

     def _write_section(self, section: str) -> None:
+        if not self.writable:
+            raise ConfigError(f"Cannot write to immutable scope {self}")
         data_to_write: Optional[YamlConfigDict] = self._raw_data

         # If there is no existing data, this section SingleFileScope has never
@@ -301,19 +326,6 @@ def __repr__(self) -> str:
         return f"<SingleFileScope: {self.name}: {self.path}>"


-class ImmutableConfigScope(ConfigScope):
-    """A configuration scope that cannot be written to.
-
-    This is used for ConfigScopes passed on the command line.
-    """
-
-    def _write_section(self, section) -> None:
-        raise ConfigError(f"Cannot write to immutable scope {self}")
-
-    def __repr__(self) -> str:
-        return f"<ImmutableConfigScope: {self.name}: {self.path}>"
-
-
 class InternalConfigScope(ConfigScope):
     """An internal configuration scope that is not persisted to a file.

@@ -323,7 +335,7 @@ class InternalConfigScope(ConfigScope):
     """

     def __init__(self, name: str, data: Optional[YamlConfigDict] = None) -> None:
-        super().__init__(name, None)
+        super().__init__(name)
         self.sections = syaml.syaml_dict()

         if data is not None:
@@ -333,9 +345,6 @@ def __init__(self, name: str, data: Optional[YamlConfigDict] = None) -> None:
                 validate({section: dsec}, SECTION_SCHEMAS[section])
                 self.sections[section] = _mark_internal(syaml.syaml_dict({section: dsec}), name)

-    def get_section_filename(self, section: str) -> str:
-        raise NotImplementedError("Cannot get filename for InternalConfigScope.")
-
     def get_section(self, section: str) -> Optional[YamlConfigDict]:
         """Just reads from an internal dictionary."""
         if section not in self.sections:
@@ -440,27 +449,21 @@ def remove_scope(self, scope_name: str) -> Optional[ConfigScope]:
         return scope

     @property
-    def file_scopes(self) -> List[ConfigScope]:
-        """List of writable scopes with an associated file."""
-        return [
-            s
-            for s in self.scopes.values()
-            if (type(s) is ConfigScope or type(s) is SingleFileScope)
-        ]
+    def writable_scopes(self) -> Generator[ConfigScope, None, None]:
+        """Generator of writable scopes with an associated file."""
+        return (s for s in self.scopes.values() if s.writable)

     def highest_precedence_scope(self) -> ConfigScope:
-        """Non-internal scope with highest precedence."""
-        return next(reversed(self.file_scopes))
+        """Writable scope with highest precedence."""
+        return next(s for s in reversed(self.scopes.values()) if s.writable)  # type: ignore

     def highest_precedence_non_platform_scope(self) -> ConfigScope:
-        """Non-internal non-platform scope with highest precedence
-
-        Platform-specific scopes are of the form scope/platform"""
-        generator = reversed(self.file_scopes)
-        highest = next(generator)
-        while highest and highest.is_platform_dependent:
-            highest = next(generator)
-        return highest
+        """Writable non-platform scope with highest precedence"""
+        return next(
+            s
+            for s in reversed(self.scopes.values())  # type: ignore
+            if s.writable and not s.is_platform_dependent
+        )

     def matching_scopes(self, reg_expr) -> List[ConfigScope]:
         """
@@ -755,13 +758,14 @@ def override(


 def _add_platform_scope(
-    cfg: Union[Configuration, lang.Singleton], scope_type: Type[ConfigScope], name: str, path: str
+    cfg: Union[Configuration, lang.Singleton], name: str, path: str, writable: bool = True
 ) -> None:
     """Add a platform-specific subdirectory for the current platform."""
     platform = spack.platforms.host().name
-    plat_name = os.path.join(name, platform)
-    plat_path = os.path.join(path, platform)
-    cfg.push_scope(scope_type(plat_name, plat_path))
+    scope = DirectoryConfigScope(
+        f"{name}/{platform}", os.path.join(path, platform), writable=writable
+    )
+    cfg.push_scope(scope)


 def config_paths_from_entry_points() -> List[Tuple[str, str]]:
@@ -806,8 +810,8 @@ def _add_command_line_scopes(

         # name based on order on the command line
         name = f"cmd_scope_{i:d}"
-        cfg.push_scope(ImmutableConfigScope(name, path))
-        _add_platform_scope(cfg, ImmutableConfigScope, name, path)
+        cfg.push_scope(DirectoryConfigScope(name, path, writable=False))
+        _add_platform_scope(cfg, name, path, writable=False)


 def create() -> Configuration:
@@ -851,10 +855,10 @@ def create() -> Configuration:

     # add each scope and its platform-specific directory
     for name, path in configuration_paths:
-        cfg.push_scope(ConfigScope(name, path))
+        cfg.push_scope(DirectoryConfigScope(name, path))

         # Each scope can have per-platfom overrides in subdirectories
-        _add_platform_scope(cfg, ConfigScope, name, path)
+        _add_platform_scope(cfg, name, path)

     # add command-line scopes
     _add_command_line_scopes(cfg, COMMAND_LINE_SCOPES)
@@ -969,7 +973,7 @@ def set(path: str, value: Any, scope: Optional[str] = None) -> None:
 def add_default_platform_scope(platform: str) -> None:
     plat_name = os.path.join("defaults", platform)
     plat_path = os.path.join(CONFIGURATION_DEFAULTS_PATH[1], platform)
-    CONFIG.push_scope(ConfigScope(plat_name, plat_path))
+    CONFIG.push_scope(DirectoryConfigScope(plat_name, plat_path))


 def scopes() -> Dict[str, ConfigScope]:
@@ -978,19 +982,10 @@ def scopes() -> Dict[str, ConfigScope]:


 def writable_scopes() -> List[ConfigScope]:
-    """
-    Return list of writable scopes. Higher-priority scopes come first in the
-    list.
-    """
-    return list(
-        reversed(
-            list(
-                x
-                for x in CONFIG.scopes.values()
-                if not isinstance(x, (InternalConfigScope, ImmutableConfigScope))
-            )
-        )
-    )
+    """Return list of writable scopes. Higher-priority scopes come first in the list."""
+    scopes = [x for x in CONFIG.scopes.values() if x.writable]
+    scopes.reverse()
+    return scopes


 def writable_scope_names() -> List[str]:
@@ -1599,7 +1594,7 @@ def _config_from(scopes_or_paths: List[Union[ConfigScope, str]]) -> Configuratio
         path = os.path.normpath(scope_or_path)
         assert os.path.isdir(path), f'"{path}" must be a directory'
         name = os.path.basename(path)
-        scopes.append(ConfigScope(name, path))
+        scopes.append(DirectoryConfigScope(name, path))

     configuration = Configuration(*scopes)
     return configuration
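The theme of this config refactor is replacing isinstance checks against concrete scope classes (`InternalConfigScope`, `ImmutableConfigScope`) with a `writable` flag on an abstract `ConfigScope` base. A condensed, self-contained sketch of that design (names mirror the diff; bodies are simplified stand-ins, not the Spack source):

class ConfigScope:
    """Abstract base: subclasses decide whether they can be written."""

    def __init__(self, name: str) -> None:
        self.name = name
        self.writable = False

    def _write_section(self, section: str) -> None:
        raise NotImplementedError


class DirectoryConfigScope(ConfigScope):
    def __init__(self, name: str, path: str, *, writable: bool = True) -> None:
        super().__init__(name)
        self.path = path
        self.writable = writable

    def _write_section(self, section: str) -> None:
        if not self.writable:
            raise RuntimeError(f"Cannot write to immutable scope {self.name}")
        # ... write the section's YAML file here ...


# Command-line scopes become ordinary directory scopes flagged read-only,
# so a separate ImmutableConfigScope class is no longer needed:
scopes = [
    DirectoryConfigScope("defaults", "/etc/spack"),
    DirectoryConfigScope("cmd_scope_0", "/tmp/extra", writable=False),
]
writable = [s for s in scopes if s.writable]
assert [s.name for s in writable] == ["defaults"]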
@@ -78,24 +78,17 @@
             "image": "quay.io/almalinuxorg/almalinux:8"
         }
     },
-    "centos:stream": {
+    "centos:stream9": {
         "bootstrap": {
-            "template": "container/centos_stream.dockerfile",
-            "image": "quay.io/centos/centos:stream"
+            "template": "container/centos_stream9.dockerfile",
+            "image": "quay.io/centos/centos:stream9"
         },
         "os_package_manager": "dnf_epel",
-        "build": "spack/centos-stream",
+        "build": "spack/centos-stream9",
         "final": {
-            "image": "quay.io/centos/centos:stream"
+            "image": "quay.io/centos/centos:stream9"
         }
     },
-    "centos:7": {
-        "bootstrap": {
-            "template": "container/centos_7.dockerfile"
-        },
-        "os_package_manager": "yum",
-        "build": "spack/centos7"
-    },
     "opensuse/leap:15": {
         "bootstrap": {
             "template": "container/leap-15.dockerfile"
@@ -59,7 +59,7 @@ def __init__(
         self.buildcache_flag = ""


-class DepfileSpecVisitor(traverse.AbstractVisitor):
+class DepfileSpecVisitor:
     """This visitor produces an adjacency list of a (reduced) DAG, which
     is used to generate depfile targets with their prerequisites. Currently
     it only drops build deps when using buildcache only mode.
@@ -75,17 +75,17 @@ def __init__(self, pkg_buildcache: UseBuildCache, deps_buildcache: UseBuildCache
         self.depflag_root = _deptypes(pkg_buildcache)
         self.depflag_deps = _deptypes(deps_buildcache)

-    def neighbors(self, node: traverse.EdgeAndDepth) -> List[spack.spec.DependencySpec]:
+    def neighbors(self, node):
         """Produce a list of spec to follow from node"""
-        depflag = self.depflag_root if node[1] == 0 else self.depflag_deps
-        return traverse.sort_edges(node[0].spec.edges_to_dependencies(depflag=depflag))
+        depflag = self.depflag_root if node.depth == 0 else self.depflag_deps
+        return traverse.sort_edges(node.edge.spec.edges_to_dependencies(depflag=depflag))

-    def accept(self, node: traverse.EdgeAndDepth) -> bool:
+    def accept(self, node):
         self.adjacency_list.append(
             DepfileNode(
-                target=node[0].spec,
+                target=node.edge.spec,
                 prereqs=[edge.spec for edge in self.neighbors(node)],
-                buildcache=self.pkg_buildcache if node[1] == 0 else self.deps_buildcache,
+                buildcache=self.pkg_buildcache if node.depth == 0 else self.deps_buildcache,
             )
         )
@@ -24,12 +24,15 @@
 from llnl.util.link_tree import ConflictingSpecsError
 from llnl.util.symlink import readlink, symlink

+import spack.caches
 import spack.cmd
 import spack.compilers
 import spack.concretize
 import spack.config
+import spack.deptypes as dt
 import spack.error
 import spack.fetch_strategy
+import spack.filesystem_view as fsv
 import spack.hash_types as ht
 import spack.hooks
 import spack.main
@@ -52,7 +55,6 @@
 import spack.util.url
 import spack.version
 from spack import traverse
-from spack.filesystem_view import SimpleFilesystemView, inverse_view_func_parser, view_func_parser
 from spack.installer import PackageInstaller
 from spack.schema.env import TOP_LEVEL_KEY
 from spack.spec import Spec
@@ -606,7 +608,7 @@ def __init__(
         self.projections = projections
         self.select = select
         self.exclude = exclude
-        self.link_type = view_func_parser(link_type)
+        self.link_type = fsv.canonicalize_link_type(link_type)
         self.link = link

     def select_fn(self, spec):
@@ -640,7 +642,7 @@ def to_dict(self):
         if self.exclude:
             ret["exclude"] = self.exclude
         if self.link_type:
-            ret["link_type"] = inverse_view_func_parser(self.link_type)
+            ret["link_type"] = self.link_type
         if self.link != default_view_link:
             ret["link"] = self.link
         return ret
@@ -690,7 +692,7 @@ def get_projection_for_spec(self, spec):
         to exist on the filesystem."""
         return self._view(self.root).get_projection_for_spec(spec)

-    def view(self, new: Optional[str] = None) -> SimpleFilesystemView:
+    def view(self, new: Optional[str] = None) -> fsv.SimpleFilesystemView:
         """
         Returns a view object for the *underlying* view directory. This means that the
         self.root symlink is followed, and that the view has to exist on the filesystem
@@ -710,14 +712,14 @@ def view(self, new: Optional[str] = None) -> SimpleFilesystemView:
         )
         return self._view(path)

-    def _view(self, root: str) -> SimpleFilesystemView:
+    def _view(self, root: str) -> fsv.SimpleFilesystemView:
         """Returns a view object for a given root dir."""
-        return SimpleFilesystemView(
+        return fsv.SimpleFilesystemView(
             root,
             spack.store.STORE.layout,
             ignore_conflicts=True,
             projections=self.projections,
-            link=self.link_type,
+            link_type=self.link_type,
         )

     def __contains__(self, spec):
@@ -2473,27 +2475,21 @@ def _equiv_dict(first, second):
     return same_values and same_keys_with_same_overrides


-def display_specs(concretized_specs):
-    """Displays the list of specs returned by `Environment.concretize()`.
+def display_specs(specs):
+    """Displays a list of specs traversed breadth-first, covering nodes, with install status.

     Args:
-        concretized_specs (list): list of specs returned by
-            `Environment.concretize()`
+        specs (list): list of specs
     """
-
-    def _tree_to_display(spec):
-        return spec.tree(
-            recurse_dependencies=True,
-            format=spack.spec.DISPLAY_FORMAT,
-            status_fn=spack.spec.Spec.install_status,
-            hashlen=7,
-            hashes=True,
-        )
-
-    for user_spec, concrete_spec in concretized_specs:
-        tty.msg("Concretized {0}".format(user_spec))
-        sys.stdout.write(_tree_to_display(concrete_spec))
-        print("")
+    tree_string = spack.spec.tree(
+        specs,
+        format=spack.spec.DISPLAY_FORMAT,
+        hashes=True,
+        hashlen=7,
+        status_fn=spack.spec.Spec.install_status,
+        key=traverse.by_dag_hash,
+    )
+    print(tree_string)


 def _concretize_from_constraints(spec_constraints, tests=False):
@@ -2547,7 +2543,7 @@ def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]:

 def make_repo_path(root):
     """Make a RepoPath from the repo subdirectories in an environment."""
-    path = spack.repo.RepoPath()
+    path = spack.repo.RepoPath(cache=spack.caches.MISC_CACHE)

     if os.path.isdir(root):
         for repo_root in os.listdir(root):
@@ -2556,7 +2552,7 @@ def make_repo_path(root):
             if not os.path.isdir(repo_root):
                 continue

-            repo = spack.repo.Repo(repo_root)
+            repo = spack.repo.from_path(repo_root)
             path.put_last(repo)

     return path
@@ -3032,7 +3028,7 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:
             SpackEnvironmentError: if the manifest includes a remote file but
                 no configuration stage directory has been identified
         """
-        scopes = []
+        scopes: List[spack.config.ConfigScope] = []

         # load config scopes added via 'include:', in reverse so that
         # highest-precedence scopes are last.
@@ -3101,23 +3097,21 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:
             if os.path.isdir(config_path):
                 # directories are treated as regular ConfigScopes
                 config_name = "env:%s:%s" % (env_name, os.path.basename(config_path))
-                tty.debug("Creating ConfigScope {0} for '{1}'".format(config_name, config_path))
-                scope = spack.config.ConfigScope(config_name, config_path)
+                tty.debug(f"Creating DirectoryConfigScope {config_name} for '{config_path}'")
+                scopes.append(spack.config.DirectoryConfigScope(config_name, config_path))
             elif os.path.exists(config_path):
                 # files are assumed to be SingleFileScopes
                 config_name = "env:%s:%s" % (env_name, config_path)
-                tty.debug(
-                    "Creating SingleFileScope {0} for '{1}'".format(config_name, config_path)
-                )
-                scope = spack.config.SingleFileScope(
-                    config_name, config_path, spack.schema.merged.schema
-                )
+                tty.debug(f"Creating SingleFileScope {config_name} for '{config_path}'")
+                scopes.append(
+                    spack.config.SingleFileScope(
+                        config_name, config_path, spack.schema.merged.schema
+                    )
+                )
             else:
                 missing.append(config_path)
                 continue

-            scopes.append(scope)
-
         if missing:
             msg = "Detected {0} missing include path(s):".format(len(missing))
             msg += "\n   {0}".format("\n   ".join(missing))
@@ -3134,7 +3128,10 @@ def env_config_scopes(self) -> List[spack.config.ConfigScope]:
         scopes: List[spack.config.ConfigScope] = [
             *self.included_config_scopes,
             spack.config.SingleFileScope(
-                self.scope_name, str(self.manifest_file), spack.schema.env.schema, [TOP_LEVEL_KEY]
+                self.scope_name,
+                str(self.manifest_file),
+                spack.schema.env.schema,
+                yaml_path=[TOP_LEVEL_KEY],
             ),
         ]
         ensure_no_disallowed_env_config_mods(scopes)
@@ -10,8 +10,9 @@
 import shutil
 import stat
 import sys
-from typing import Optional
+from typing import Callable, Dict, Optional

+from llnl.string import comma_or
 from llnl.util import tty
 from llnl.util.filesystem import (
     mkdirp,
@@ -49,19 +50,20 @@
 _projections_path = ".spack/projections.yaml"


-def view_symlink(src, dst, **kwargs):
-    # keyword arguments are irrelevant
-    # here to fit required call signature
+LinkCallbackType = Callable[[str, str, "FilesystemView", Optional["spack.spec.Spec"]], None]
+
+
+def view_symlink(src: str, dst: str, *args, **kwargs) -> None:
     symlink(src, dst)


-def view_hardlink(src, dst, **kwargs):
-    # keyword arguments are irrelevant
-    # here to fit required call signature
+def view_hardlink(src: str, dst: str, *args, **kwargs) -> None:
     os.link(src, dst)


-def view_copy(src: str, dst: str, view, spec: Optional[spack.spec.Spec] = None):
+def view_copy(
+    src: str, dst: str, view: "FilesystemView", spec: Optional["spack.spec.Spec"] = None
+) -> None:
     """
     Copy a file from src to dst.

@@ -104,27 +106,40 @@ def view_copy(src: str, dst: str, view, spec: Optional[spack.spec.Spec] = None):
         tty.debug(f"Can't change the permissions for {dst}")


-def view_func_parser(parsed_name):
-    # What method are we using for this view
-    if parsed_name in ("hardlink", "hard"):
+#: supported string values for `link_type` in an env, mapped to canonical values
+_LINK_TYPES = {
+    "hardlink": "hardlink",
+    "hard": "hardlink",
+    "copy": "copy",
+    "relocate": "copy",
+    "add": "symlink",
+    "symlink": "symlink",
+    "soft": "symlink",
+}
+
+_VALID_LINK_TYPES = sorted(set(_LINK_TYPES.values()))
+
+
+def canonicalize_link_type(link_type: str) -> str:
+    """Return canonical"""
+    canonical = _LINK_TYPES.get(link_type)
+    if not canonical:
+        raise ValueError(
+            f"Invalid link type: '{link_type}. Must be one of {comma_or(_VALID_LINK_TYPES)}'"
+        )
+    return canonical
+
+
+def function_for_link_type(link_type: str) -> LinkCallbackType:
+    link_type = canonicalize_link_type(link_type)
+    if link_type == "hardlink":
         return view_hardlink
-    elif parsed_name in ("copy", "relocate"):
-        return view_copy
-    elif parsed_name in ("add", "symlink", "soft"):
+    elif link_type == "symlink":
         return view_symlink
-    else:
-        raise ValueError(f"invalid link type for view: '{parsed_name}'")
-
-
-def inverse_view_func_parser(view_type):
-    # get string based on view type
-    if view_type is view_hardlink:
-        link_name = "hardlink"
-    elif view_type is view_copy:
-        link_name = "copy"
-    else:
-        link_name = "symlink"
-    return link_name
+    elif link_type == "copy":
+        return view_copy
+
+    assert False, "invalid link type"  # need mypy Literal values


 class FilesystemView:
@@ -140,7 +155,16 @@ class FilesystemView:
     directory structure.
     """

-    def __init__(self, root, layout, **kwargs):
+    def __init__(
+        self,
+        root: str,
+        layout: "spack.directory_layout.DirectoryLayout",
+        *,
+        projections: Optional[Dict] = None,
+        ignore_conflicts: bool = False,
+        verbose: bool = False,
+        link_type: str = "symlink",
+    ):
         """
         Initialize a filesystem view under the given `root` directory with
         corresponding directory `layout`.
@@ -149,15 +173,14 @@ def __init__(self, root, layout, **kwargs):
         """
         self._root = root
         self.layout = layout
-
-        self.projections = kwargs.get("projections", {})
-
-        self.ignore_conflicts = kwargs.get("ignore_conflicts", False)
-        self.verbose = kwargs.get("verbose", False)
+        self.projections = {} if projections is None else projections
+        self.ignore_conflicts = ignore_conflicts
+        self.verbose = verbose

         # Setup link function to include view
-        link_func = kwargs.get("link", view_symlink)
-        self.link = ft.partial(link_func, view=self)
+        self.link_type = link_type
+        self.link = ft.partial(function_for_link_type(link_type), view=self)

     def add_specs(self, *specs, **kwargs):
         """
@@ -255,8 +278,24 @@ class YamlFilesystemView(FilesystemView):
     Filesystem view to work with a yaml based directory layout.
     """

-    def __init__(self, root, layout, **kwargs):
-        super().__init__(root, layout, **kwargs)
+    def __init__(
+        self,
+        root: str,
+        layout: "spack.directory_layout.DirectoryLayout",
+        *,
+        projections: Optional[Dict] = None,
+        ignore_conflicts: bool = False,
+        verbose: bool = False,
+        link_type: str = "symlink",
+    ):
+        super().__init__(
+            root,
+            layout,
+            projections=projections,
+            ignore_conflicts=ignore_conflicts,
+            verbose=verbose,
+            link_type=link_type,
+        )

         # Super class gets projections from the kwargs
         # YAML specific to get projections from YAML file
@@ -638,9 +677,6 @@ class SimpleFilesystemView(FilesystemView):
     """A simple and partial implementation of FilesystemView focused on performance and immutable
     views, where specs cannot be removed after they were added."""

-    def __init__(self, root, layout, **kwargs):
-        super().__init__(root, layout, **kwargs)
-
     def _sanity_check_view_projection(self, specs):
         """A very common issue is that we end up with two specs of the same package, that project
         to the same prefix. We want to catch that as early as possible and give a sensible error to
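The two-step split above (canonicalize the link-type string, then resolve it to a callback) replaces the old one-shot parser, so the canonical string can be stored and serialized while the function is looked up on demand. A self-contained sketch of that table-driven pattern:

import os
from typing import Callable

# Map every accepted spelling to a canonical link type.
_LINK_TYPES = {"hard": "hardlink", "hardlink": "hardlink",
               "soft": "symlink", "symlink": "symlink"}


def canonicalize(link_type: str) -> str:
    try:
        return _LINK_TYPES[link_type]
    except KeyError:
        raise ValueError(f"invalid link type: {link_type!r}") from None


def function_for(link_type: str) -> Callable[[str, str], None]:
    # Resolve the canonical name to the actual filesystem operation.
    return {"hardlink": os.link, "symlink": os.symlink}[canonicalize(link_type)]


assert function_for("soft") is os.symlink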
@@ -582,7 +582,7 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:

             # Create a source repo and get the pkg directory out of it.
             try:
-                source_repo = spack.repo.Repo(source_repo_root)
+                source_repo = spack.repo.from_path(source_repo_root)
                 source_pkg_dir = source_repo.dirname_for_package_name(node.name)
             except spack.repo.RepoError as err:
                 tty.debug(f"Failed to create source repo for {node.name}: {str(err)}")
@@ -593,7 +593,7 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
             dest_repo_root = os.path.join(path, node.namespace)
             if not os.path.exists(dest_repo_root):
                 spack.repo.create_repo(dest_repo_root)
-            repo = spack.repo.Repo(dest_repo_root)
+            repo = spack.repo.from_path(dest_repo_root)

             # Get the location of the package in the dest repo.
             dest_pkg_dir = repo.dirname_for_package_name(node.name)
@@ -1542,17 +1542,6 @@ def _add_tasks(self, request: BuildRequest, all_deps):
             tty.warn(f"Installation request refused: {str(err)}")
             return

-        # Skip out early if the spec is not being installed locally (i.e., if
-        # external or upstream).
-        #
-        # External and upstream packages need to get flagged as installed to
-        # ensure proper status tracking for environment build.
-        explicit = request.pkg.spec.dag_hash() in request.install_args.get("explicit", [])
-        not_local = _handle_external_and_upstream(request.pkg, explicit)
-        if not_local:
-            self._flag_installed(request.pkg)
-            return
-
         install_compilers = spack.config.get("config:install_missing_compilers", False)

         install_deps = request.install_args.get("install_deps")
@@ -2029,11 +2018,10 @@ def install(self) -> None:
             # Skip the installation if the spec is not being installed locally
             # (i.e., if external or upstream) BUT flag it as installed since
             # some package likely depends on it.
-            if not task.explicit:
-                if _handle_external_and_upstream(pkg, False):
-                    term_status.clear()
-                    self._flag_installed(pkg, task.dependents)
-                    continue
+            if _handle_external_and_upstream(pkg, task.explicit):
+                term_status.clear()
+                self._flag_installed(pkg, task.dependents)
+                continue

             # Flag a failed spec. Do not need an (install) prefix lock since
             # assume using a separate (failed) prefix lock file.
@@ -143,6 +143,7 @@ def __init__(self):
             "12": "monterey",
             "13": "ventura",
             "14": "sonoma",
+            "15": "sequoia",
         }

         version = macos_version()
@@ -621,10 +621,6 @@ class PackageBase(WindowsRPath, PackageViewMixin, RedistributionMixin, metaclass
     #: By default do not run tests within package's install()
     run_tests = False

-    #: Keep -Werror flags, matches config:flags:keep_werror to override config
-    # NOTE: should be type Optional[Literal['all', 'specific', 'none']] in 3.8+
-    keep_werror: Optional[str] = None
-
     #: Most packages are NOT extendable. Set to True if you want extensions.
     extendable = False

@@ -752,11 +748,6 @@ def __init__(self, spec):
         self._fetch_time = 0.0

         self.win_rpath = fsys.WindowsSimulatedRPath(self)

-        if self.is_extension:
-            pkg_cls = spack.repo.PATH.get_pkg_class(self.extendee_spec.name)
-            pkg_cls(self.extendee_spec)._check_extendable()
-
         super().__init__()

     @classmethod
@@ -930,6 +921,32 @@ def global_license_file(self):
             self.global_license_dir, self.name, os.path.basename(self.license_files[0])
         )

+    # NOTE: return type should be Optional[Literal['all', 'specific', 'none']] in
+    # Python 3.8+, but we still support 3.6.
+    @property
+    def keep_werror(self) -> Optional[str]:
+        """Keep ``-Werror`` flags, matches ``config:flags:keep_werror`` to override config.
+
+        Valid return values are:
+        * ``"all"``: keep all ``-Werror`` flags.
+        * ``"specific"``: keep only ``-Werror=specific-warning`` flags.
+        * ``"none"``: filter out all ``-Werror*`` flags.
+        * ``None``: respect the user's configuration (``"none"`` by default).
+        """
+        if self.spec.satisfies("%nvhpc@:23.3") or self.spec.satisfies("%pgi"):
+            # Filtering works by replacing -Werror with -Wno-error, but older nvhpc and
+            # PGI do not understand -Wno-error, so we disable filtering.
+            return "all"
+
+        elif self.spec.satisfies("%nvhpc@23.4:"):
+            # newer nvhpc supports -Wno-error but can't disable specific warnings with
+            # -Wno-error=warning. Skip -Werror=warning, but still filter -Werror.
+            return "specific"
+
+        else:
+            # use -Werror disablement by default for other compilers
+            return None
+
     @property
     def version(self):
         if not self.spec.versions.concrete:
@@ -1119,10 +1136,9 @@ def _make_stage(self):
             if not link_format:
                 link_format = "build-{arch}-{hash:7}"
             stage_link = self.spec.format_path(link_format)
-            return DevelopStage(compute_stage_name(self.spec), dev_path, stage_link)
-
-        # To fetch the current version
-        source_stage = self._make_root_stage(self.fetcher)
+            source_stage = DevelopStage(compute_stage_name(self.spec), dev_path, stage_link)
+        else:
+            source_stage = self._make_root_stage(self.fetcher)

         # all_stages is source + resources + patches
         all_stages = StageComposite()
@@ -1451,10 +1467,8 @@ def do_fetch(self, mirror_only=False):
             return

         checksum = spack.config.get("config:checksum")
-        fetch = self.stage.needs_fetching
         if (
             checksum
-            and fetch
             and (self.version not in self.versions)
             and (not isinstance(self.version, GitVersion))
         ):
@@ -1561,13 +1575,11 @@ def do_patch(self):
                 tty.debug("Patching failed last time. Restaging.")
                 self.stage.restage()
             else:
-                # develop specs/ DIYStages may have patch failures but
-                # should never be restaged
-                msg = (
-                    "A patch failure was detected in %s." % self.name
-                    + " Build errors may occur due to this."
+                # develop specs may have patch failures but should never be restaged
+                tty.warn(
+                    f"A patch failure was detected in {self.name}."
+                    " Build errors may occur due to this."
                 )
-                tty.warn(msg)
                 return

         # If this file exists, then we already applied all the patches.
@@ -2371,10 +2383,6 @@ def do_deprecate(self, deprecator, link_fn):
         PackageBase.uninstall_by_spec(spec, force=True, deprecator=deprecator)
         link_fn(deprecator.prefix, spec.prefix)

-    def _check_extendable(self):
-        if not self.extendable:
-            raise ValueError("Package %s is not extendable!" % self.name)
-
     def view(self):
         """Create a view with the prefix of this package as the root.
         Extensions added to this view will modify the installation prefix of
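Moving `keep_werror` from a class attribute to a read-only property lets the default depend on the spec's compiler at lookup time. A stripped-down sketch of that pattern (a hypothetical class for illustration, not the Spack source):

from typing import Optional


class Package:
    def __init__(self, compiler: str):
        self.compiler = compiler

    @property
    def keep_werror(self) -> Optional[str]:
        # A property can consult instance state; a class attribute cannot.
        if self.compiler == "old-compiler":
            return "all"  # this compiler cannot filter -Werror flags at all
        return None  # defer to user configuration


assert Package("old-compiler").keep_werror == "all"
assert Package("gcc").keep_werror is None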
@@ -25,7 +25,8 @@
 import traceback
 import types
 import uuid
-from typing import Any, Dict, List, Set, Tuple, Union
+import warnings
+from typing import Any, Dict, Generator, List, Optional, Set, Tuple, Type, Union

 import llnl.path
 import llnl.util.filesystem as fs
@@ -126,11 +127,35 @@ def exec_module(self, module):


 class ReposFinder:
-    """MetaPathFinder class that loads a Python module corresponding to a Spack package
+    """MetaPathFinder class that loads a Python module corresponding to a Spack package.

-    Return a loader based on the inspection of the current global repository list.
+    Returns a loader based on the inspection of the current repository list.
     """

+    def __init__(self):
+        self._repo_init = _path
+        self._repo = None
+
+    @property
+    def current_repository(self):
+        if self._repo is None:
+            self._repo = self._repo_init()
+        return self._repo
+
+    @current_repository.setter
+    def current_repository(self, value):
+        self._repo = value
+
+    @contextlib.contextmanager
+    def switch_repo(self, substitute: "RepoType"):
+        """Switch the current repository list for the duration of the context manager."""
+        old = self.current_repository
+        try:
+            self.current_repository = substitute
+            yield
+        finally:
+            self.current_repository = old
+
     def find_spec(self, fullname, python_path, target=None):
         # "target" is not None only when calling importlib.reload()
         if target is not None:
@@ -149,9 +174,14 @@ def compute_loader(self, fullname):
         # namespaces are added to repo, and package modules are leaves.
         namespace, dot, module_name = fullname.rpartition(".")

-        # If it's a module in some repo, or if it is the repo's
-        # namespace, let the repo handle it.
-        for repo in PATH.repos:
+        # If it's a module in some repo, or if it is the repo's namespace, let the repo handle it.
+        is_repo_path = isinstance(self.current_repository, RepoPath)
+        if is_repo_path:
+            repos = self.current_repository.repos
+        else:
+            repos = [self.current_repository]
+
+        for repo in repos:
             # We are using the namespace of the repo and the repo contains the package
             if namespace == repo.full_namespace:
                 # With 2 nested conditionals we can call "repo.real_name" only once
@@ -165,7 +195,7 @@ def compute_loader(self, fullname):

         # No repo provides the namespace, but it is a valid prefix of
         # something in the RepoPath.
-        if PATH.by_namespace.is_prefix(fullname):
+        if is_repo_path and self.current_repository.by_namespace.is_prefix(fullname):
             return SpackNamespaceLoader()

         return None
@@ -560,7 +590,7 @@ def __init__(
         self,
         package_checker: FastPackageChecker,
         namespace: str,
-        cache: spack.util.file_cache.FileCache,
+        cache: spack.caches.FileCacheType,
     ):
         self.checker = package_checker
         self.packages_path = self.checker.packages_path
@@ -648,11 +678,9 @@ class RepoPath:
         repos (list): list Repo objects or paths to put in this RepoPath
     """

-    def __init__(self, *repos, **kwargs):
-        cache = kwargs.get("cache", spack.caches.MISC_CACHE)
+    def __init__(self, *repos, cache, overrides=None):
         self.repos = []
         self.by_namespace = nm.NamespaceTrie()
-
         self._provider_index = None
         self._patch_index = None
         self._tag_index = None
@@ -661,7 +689,8 @@ def __init__(self, *repos, **kwargs):
         for repo in repos:
             try:
                 if isinstance(repo, str):
-                    repo = Repo(repo, cache=cache)
+                    repo = Repo(repo, cache=cache, overrides=overrides)
+                repo.finder(self)
                 self.put_last(repo)
             except RepoError as e:
                 tty.warn(
@@ -915,18 +944,28 @@ class Repo:
     Each package repository must have a top-level configuration file
     called `repo.yaml`.

-    Currently, `repo.yaml` this must define:
+    Currently, `repo.yaml` must define:

     `namespace`:
         A Python namespace where the repository's packages should live.

     `subdirectory`:
         An optional subdirectory name where packages are placed
     """

-    def __init__(self, root, cache=None):
+    def __init__(
+        self,
+        root: str,
+        *,
+        cache: spack.caches.FileCacheType,
+        overrides: Optional[Dict[str, Any]] = None,
+    ) -> None:
+        """Instantiate a package repository from a filesystem path.
+
+        Args:
+            root: the root directory of the repository
+            cache: file cache associated with this repository
+            overrides: dict mapping package name to class attribute overrides for that package
+        """
         # Root directory, containing _repo.yaml and package dirs
         # Allow roots to by spack-relative by starting with '$spack'
@@ -939,20 +978,20 @@ def check(condition, msg):

         # Validate repository layout.
         self.config_file = os.path.join(self.root, repo_config_name)
-        check(os.path.isfile(self.config_file), "No %s found in '%s'" % (repo_config_name, root))
+        check(os.path.isfile(self.config_file), f"No {repo_config_name} found in '{root}'")

         # Read configuration and validate namespace
         config = self._read_config()
         check(
             "namespace" in config,
-            "%s must define a namespace." % os.path.join(root, repo_config_name),
+            f"{os.path.join(root, repo_config_name)} must define a namespace.",
         )

         self.namespace = config["namespace"]
         check(
             re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
-            ("Invalid namespace '%s' in repo '%s'. " % (self.namespace, self.root))
-            + "Namespaces must be valid python identifiers separated by '.'",
+            f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "
+            "Namespaces must be valid python identifiers separated by '.'",
         )

         # Set up 'full_namespace' to include the super-namespace
@@ -964,23 +1003,26 @@ def check(condition, msg):
         packages_dir = config.get("subdirectory", packages_dir_name)
         self.packages_path = os.path.join(self.root, packages_dir)
         check(
-            os.path.isdir(self.packages_path),
-            "No directory '%s' found in '%s'" % (packages_dir, root),
+            os.path.isdir(self.packages_path), f"No directory '{packages_dir}' found in '{root}'"
         )

-        # These are internal cache variables.
-        self._modules = {}
-        self._classes = {}
-        self._instances = {}
+        # Class attribute overrides by package name
+        self.overrides = overrides or {}
+
+        # Optional reference to a RepoPath to influence module import from spack.pkg
+        self._finder: Optional[RepoPath] = None

         # Maps that goes from package name to corresponding file stat
-        self._fast_package_checker = None
+        self._fast_package_checker: Optional[FastPackageChecker] = None

         # Indexes for this repository, computed lazily
-        self._repo_index = None
-        self._cache = cache or spack.caches.MISC_CACHE
+        self._repo_index: Optional[RepoIndex] = None
+        self._cache = cache

-    def real_name(self, import_name):
+    def finder(self, value: RepoPath) -> None:
+        self._finder = value
+
+    def real_name(self, import_name: str) -> Optional[str]:
         """Allow users to import Spack packages using Python identifiers.

         A python identifier might map to many different Spack package
@@ -999,18 +1041,21 @@ def real_name(self, import_name):
             return import_name

         options = nm.possible_spack_module_names(import_name)
-        options.remove(import_name)
+        try:
+            options.remove(import_name)
+        except ValueError:
+            pass
         for name in options:
             if name in self:
                 return name
         return None

-    def is_prefix(self, fullname):
+    def is_prefix(self, fullname: str) -> bool:
         """True if fullname is a prefix of this Repo's namespace."""
         parts = fullname.split(".")
         return self._names[: len(parts)] == parts

-    def _read_config(self):
+    def _read_config(self) -> Dict[str, str]:
         """Check for a YAML config file in this db's root directory."""
         try:
             with open(self.config_file) as reponame_file:
@@ -1021,14 +1066,14 @@ def _read_config(self):
                 or "repo" not in yaml_data
                 or not isinstance(yaml_data["repo"], dict)
             ):
-                tty.die("Invalid %s in repository %s" % (repo_config_name, self.root))
+                tty.die(f"Invalid {repo_config_name} in repository {self.root}")

             return yaml_data["repo"]

         except IOError:
-            tty.die("Error reading %s when opening %s" % (self.config_file, self.root))
+            tty.die(f"Error reading {self.config_file} when opening {self.root}")

-    def get(self, spec):
+    def get(self, spec: "spack.spec.Spec") -> "spack.package_base.PackageBase":
         """Returns the package associated with the supplied spec."""
         msg = "Repo.get can only be called on concrete specs"
         assert isinstance(spec, spack.spec.Spec) and spec.concrete, msg
@@ -1049,16 +1094,13 @@ def get(self, spec):
             # pass these through as their error messages will be fine.
             raise
         except Exception as e:
-            tty.debug(e)
-
             # Make sure other errors in constructors hit the error
             # handler by wrapping them
             if spack.config.get("config:debug"):
                 sys.excepthook(*sys.exc_info())
-            raise FailedConstructorError(spec.fullname, *sys.exc_info())
+            tty.debug(e)
+            raise FailedConstructorError(spec.fullname, *sys.exc_info()) from e

     @autospec
-    def dump_provenance(self, spec, path):
+    def dump_provenance(self, spec: "spack.spec.Spec", path: str) -> None:
         """Dump provenance information for a spec to a particular path.

         This dumps the package file and any associated patch files.
@@ -1066,7 +1108,7 @@ def dump_provenance(self, spec, path):
         """
         if spec.namespace and spec.namespace != self.namespace:
             raise UnknownPackageError(
-                "Repository %s does not contain package %s." % (self.namespace, spec.fullname)
+                f"Repository {self.namespace} does not contain package {spec.fullname}."
             )

         package_path = self.filename_for_package_name(spec.name)
@@ -1083,17 +1125,13 @@ def dump_provenance(self, spec, path):
             if os.path.exists(patch.path):
                 fs.install(patch.path, path)
             else:
-                tty.warn("Patch file did not exist: %s" % patch.path)
+                warnings.warn(f"Patch file did not exist: {patch.path}")

         # Install the package.py file itself.
         fs.install(self.filename_for_package_name(spec.name), path)

-    def purge(self):
-        """Clear entire package instance cache."""
-        self._instances.clear()
-
     @property
-    def index(self):
+    def index(self) -> RepoIndex:
         """Construct the index for this repo lazily."""
         if self._repo_index is None:
             self._repo_index = RepoIndex(self._pkg_checker, self.namespace, cache=self._cache)
@@ -1103,42 +1141,40 @@ def index(self):
         return self._repo_index

     @property
-    def provider_index(self):
+    def provider_index(self) -> spack.provider_index.ProviderIndex:
         """A provider index with names *specific* to this repo."""
         return self.index["providers"]

     @property
-    def tag_index(self):
+    def tag_index(self) -> spack.tag.TagIndex:
         """Index of tags and which packages they're defined on."""
         return self.index["tags"]

     @property
-    def patch_index(self):
+    def patch_index(self) -> spack.patch.PatchCache:
         """Index of patches and packages they're defined on."""
         return self.index["patches"]

     @autospec
-    def providers_for(self, vpkg_spec):
+    def providers_for(self, vpkg_spec: "spack.spec.Spec") -> List["spack.spec.Spec"]:
         providers = self.provider_index.providers_for(vpkg_spec)
         if not providers:
             raise UnknownPackageError(vpkg_spec.fullname)
         return providers

     @autospec
-    def extensions_for(self, extendee_spec):
-        return [
-            pkg_cls(spack.spec.Spec(pkg_cls.name))
-            for pkg_cls in self.all_package_classes()
-            if pkg_cls(spack.spec.Spec(pkg_cls.name)).extends(extendee_spec)
-        ]
+    def extensions_for(
+        self, extendee_spec: "spack.spec.Spec"
+    ) -> List["spack.package_base.PackageBase"]:
+        result = [pkg_cls(spack.spec.Spec(pkg_cls.name)) for pkg_cls in self.all_package_classes()]
+        return [x for x in result if x.extends(extendee_spec)]

-    def dirname_for_package_name(self, pkg_name):
-        """Get the directory name for a particular package. This is the
-        directory that contains its package.py file."""
+    def dirname_for_package_name(self, pkg_name: str) -> str:
+        """Given a package name, get the directory containing its package.py file."""
         _, unqualified_name = self.partition_package_name(pkg_name)
         return os.path.join(self.packages_path, unqualified_name)

-    def filename_for_package_name(self, pkg_name):
+    def filename_for_package_name(self, pkg_name: str) -> str:
         """Get the filename for the module we should load for a particular
         package. Packages for a Repo live in
         ``$root/<package_name>/package.py``
@@ -1151,23 +1187,23 @@ def filename_for_package_name(self, pkg_name):
@@ -1151,23 +1187,23 @@ def filename_for_package_name(self, pkg_name):
|
||||
return os.path.join(pkg_dir, package_file_name)
|
||||
|
||||
@property
|
||||
def _pkg_checker(self):
|
||||
def _pkg_checker(self) -> FastPackageChecker:
|
||||
if self._fast_package_checker is None:
|
||||
self._fast_package_checker = FastPackageChecker(self.packages_path)
|
||||
return self._fast_package_checker
|
||||
|
||||
def all_package_names(self, include_virtuals=False):
|
||||
def all_package_names(self, include_virtuals: bool = False) -> List[str]:
|
||||
"""Returns a sorted list of all package names in the Repo."""
|
||||
names = sorted(self._pkg_checker.keys())
|
||||
if include_virtuals:
|
||||
return names
|
||||
return [x for x in names if not self.is_virtual(x)]
|
||||
|
||||
def package_path(self, name):
|
||||
def package_path(self, name: str) -> str:
|
||||
"""Get path to package.py file for this repo."""
|
||||
return os.path.join(self.packages_path, name, package_file_name)
|
||||
|
||||
def all_package_paths(self):
|
||||
def all_package_paths(self) -> Generator[str, None, None]:
|
||||
for name in self.all_package_names():
|
||||
yield self.package_path(name)
|
||||
|
||||
@@ -1176,7 +1212,7 @@ def packages_with_tags(self, *tags: str) -> Set[str]:
|
||||
v.intersection_update(*(self.tag_index[tag.lower()] for tag in tags))
|
||||
return v
|
||||
|
||||
def all_package_classes(self):
|
||||
def all_package_classes(self) -> Generator[Type["spack.package_base.PackageBase"], None, None]:
|
||||
"""Iterator over all package *classes* in the repository.
|
||||
|
||||
Use this with care, because loading packages is slow.
|
||||
@@ -1184,7 +1220,7 @@ def all_package_classes(self):
|
||||
for name in self.all_package_names():
|
||||
yield self.get_pkg_class(name)
|
||||
|
||||
def exists(self, pkg_name):
|
||||
def exists(self, pkg_name: str) -> bool:
|
||||
"""Whether a package with the supplied name exists."""
|
||||
if pkg_name is None:
|
||||
return False
|
||||
@@ -1201,28 +1237,22 @@ def last_mtime(self):
|
||||
"""Time a package file in this repo was last updated."""
|
||||
return self._pkg_checker.last_mtime()
|
||||
|
||||
def is_virtual(self, pkg_name):
|
||||
def is_virtual(self, pkg_name: str) -> bool:
|
||||
"""Return True if the package with this name is virtual, False otherwise.
|
||||
|
||||
This function use the provider index. If calling from a code block that
|
||||
is used to construct the provider index use the ``is_virtual_safe`` function.
|
||||
|
||||
Args:
|
||||
pkg_name (str): name of the package we want to check
|
||||
"""
|
||||
return pkg_name in self.provider_index
|
||||
|
||||
def is_virtual_safe(self, pkg_name):
|
||||
def is_virtual_safe(self, pkg_name: str) -> bool:
|
||||
"""Return True if the package with this name is virtual, False otherwise.
|
||||
|
||||
This function doesn't use the provider index.
|
||||
|
||||
Args:
|
||||
pkg_name (str): name of the package we want to check
|
||||
"""
|
||||
return not self.exists(pkg_name) or self.get_pkg_class(pkg_name).virtual
|
||||
|
||||
def get_pkg_class(self, pkg_name):
|
||||
def get_pkg_class(self, pkg_name: str) -> Type["spack.package_base.PackageBase"]:
|
||||
"""Get the class for the package out of its module.
|
||||
|
||||
First loads (or fetches from cache) a module for the
|
||||
@@ -1234,7 +1264,8 @@ def get_pkg_class(self, pkg_name):
|
||||
fullname = f"{self.full_namespace}.{pkg_name}"
|
||||
|
||||
try:
|
||||
module = importlib.import_module(fullname)
|
||||
with REPOS_FINDER.switch_repo(self._finder or self):
|
||||
module = importlib.import_module(fullname)
|
||||
except ImportError:
|
||||
raise UnknownPackageError(fullname)
|
||||
except Exception as e:
|
||||
@@ -1245,26 +1276,21 @@ def get_pkg_class(self, pkg_name):
|
||||
if not inspect.isclass(cls):
|
||||
tty.die(f"{pkg_name}.{class_name} is not a class")
|
||||
|
||||
new_cfg_settings = (
|
||||
spack.config.get("packages").get(pkg_name, {}).get("package_attributes", {})
|
||||
)
|
||||
|
||||
# Clear any prior changes to class attributes in case the class was loaded from the
|
||||
# same repo, but with different overrides
|
||||
overridden_attrs = getattr(cls, "overridden_attrs", {})
|
||||
attrs_exclusively_from_config = getattr(cls, "attrs_exclusively_from_config", [])
|
||||
# Clear any prior changes to class attributes in case the config has
|
||||
# since changed
|
||||
for key, val in overridden_attrs.items():
|
||||
setattr(cls, key, val)
|
||||
for key in attrs_exclusively_from_config:
|
||||
delattr(cls, key)
|
||||
|
||||
# Keep track of every class attribute that is overridden by the config:
|
||||
# if the config changes between calls to this method, we make sure to
|
||||
# restore the original config values (in case the new config no longer
|
||||
# sets attributes that it used to)
|
||||
# Keep track of every class attribute that is overridden: if different overrides
|
||||
# dictionaries are used on the same physical repo, we make sure to restore the original
|
||||
# config values
|
||||
new_overridden_attrs = {}
|
||||
new_attrs_exclusively_from_config = set()
|
||||
for key, val in new_cfg_settings.items():
|
||||
for key, val in self.overrides.get(pkg_name, {}).items():
|
||||
if hasattr(cls, key):
|
||||
new_overridden_attrs[key] = getattr(cls, key)
|
||||
else:
|
||||
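The hunk above moves `get_pkg_class` from reading `packages:<name>:package_attributes` out of the global config to reading the repo's own `self.overrides` dict, while keeping the same save/restore bookkeeping. A minimal sketch of that bookkeeping pattern, assuming only a bare class and a plain overrides dict:

```python
def apply_overrides(cls, overrides: dict) -> None:
    """Apply attribute overrides to a class, undoing any previous round first."""
    # Restore attributes saved on a previous call, so stale overrides never
    # leak between calls that use different override dictionaries.
    for key, val in getattr(cls, "overridden_attrs", {}).items():
        setattr(cls, key, val)
    for key in getattr(cls, "attrs_exclusively_from_config", []):
        delattr(cls, key)

    new_overridden_attrs = {}
    new_attrs_exclusively_from_config = set()
    for key, val in overrides.items():
        if hasattr(cls, key):
            new_overridden_attrs[key] = getattr(cls, key)  # remember the original
        else:
            new_attrs_exclusively_from_config.add(key)  # exists only via overrides
        setattr(cls, key, val)

    # Stash the bookkeeping on the class itself for the next call.
    cls.overridden_attrs = new_overridden_attrs
    cls.attrs_exclusively_from_config = new_attrs_exclusively_from_config
```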
@@ -1291,13 +1317,13 @@ def partition_package_name(self, pkg_name: str) -> Tuple[str, str]:

        return namespace, pkg_name

    def __str__(self):
        return "[Repo '%s' at '%s']" % (self.namespace, self.root)
    def __str__(self) -> str:
        return f"Repo '{self.namespace}' at {self.root}"

    def __repr__(self):
    def __repr__(self) -> str:
        return self.__str__()

    def __contains__(self, pkg_name):
    def __contains__(self, pkg_name: str) -> bool:
        return self.exists(pkg_name)


@@ -1373,12 +1399,17 @@ def create_repo(root, namespace=None, subdir=packages_dir_name):
    return full_path, namespace


def from_path(path: str) -> "Repo":
    """Returns a repository from the path passed as input. Injects the global misc cache."""
    return Repo(path, cache=spack.caches.MISC_CACHE)

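A quick usage sketch of the new helper, assuming a directory that already contains a valid `repo.yaml` (the path below is hypothetical):

```python
import spack.repo

# Construct a Repo wired to the global misc cache, instead of passing
# cache= explicitly at every call site.
repo = spack.repo.from_path("/path/to/my/spack-repo")  # hypothetical path
print(repo.namespace)
```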
def create_or_construct(path, namespace=None):
    """Create a repository, or just return a Repo if it already exists."""
    if not os.path.exists(path):
        fs.mkdirp(path)
        create_repo(path, namespace)
    return Repo(path)
    return from_path(path)


def _path(configuration=None):
@@ -1396,7 +1427,17 @@ def create(configuration):
    repo_dirs = configuration.get("repos")
    if not repo_dirs:
        raise NoRepoConfiguredError("Spack configuration contains no package repositories.")
    return RepoPath(*repo_dirs)

    overrides = {}
    for pkg_name, data in configuration.get("packages").items():
        if pkg_name == "all":
            continue
        value = data.get("package_attributes", {})
        if not value:
            continue
        overrides[pkg_name] = value

    return RepoPath(*repo_dirs, cache=spack.caches.MISC_CACHE, overrides=overrides)

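To make the new `create()` path concrete, here is a small self-contained sketch of how the `overrides` mapping is shaped, using an invented configuration dict in place of a real Spack configuration object:

```python
# Invented stand-in for configuration.get("packages"); in Spack this data
# would come from the "package_attributes" sections of packages.yaml.
packages_config = {
    "all": {"compiler": ["gcc"]},                 # skipped: applies to every package
    "mpich": {"package_attributes": {"foo": 1}},  # collected
    "zlib": {},                                   # skipped: no package_attributes
}

overrides = {}
for pkg_name, data in packages_config.items():
    if pkg_name == "all":
        continue
    value = data.get("package_attributes", {})
    if not value:
        continue
    overrides[pkg_name] = value

assert overrides == {"mpich": {"foo": 1}}
```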
#: Singleton repo path instance

@@ -116,6 +116,8 @@ class Provenance(enum.IntEnum):
    PACKAGE_PY = enum.auto()
    # An installed spec
    INSTALLED = enum.auto()
    # lower provenance for installed git refs so the concretizer prefers StandardVersion installs
    INSTALLED_GIT_VERSION = enum.auto()
    # A runtime injected from another package (e.g. a compiler)
    RUNTIME = enum.auto()

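Because `Provenance` is an `enum.IntEnum` built with `enum.auto()`, a member's rank is purely positional, so the new `INSTALLED_GIT_VERSION` slot sorts between `INSTALLED` and `RUNTIME` with no other code changes. A minimal sketch of that property:

```python
import enum

class Provenance(enum.IntEnum):
    # Trimmed to the members shown in the hunk above.
    PACKAGE_PY = enum.auto()
    INSTALLED = enum.auto()
    INSTALLED_GIT_VERSION = enum.auto()  # new slot, ranked right after INSTALLED
    RUNTIME = enum.auto()

# IntEnum members compare as plain integers in declaration order.
assert Provenance.INSTALLED < Provenance.INSTALLED_GIT_VERSION < Provenance.RUNTIME
```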
@@ -844,8 +846,6 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
        parent_dir = os.path.dirname(__file__)
        self.control.load(os.path.join(parent_dir, "concretize.lp"))
        self.control.load(os.path.join(parent_dir, "heuristic.lp"))
        if spack.config.CONFIG.get("concretizer:duplicates:strategy", "none") != "none":
            self.control.load(os.path.join(parent_dir, "heuristic_separate.lp"))
        self.control.load(os.path.join(parent_dir, "display.lp"))
        if not setup.concretize_everything:
            self.control.load(os.path.join(parent_dir, "when_possible.lp"))
@@ -1880,11 +1880,8 @@ def _spec_clauses(
                    )

                clauses.append(f.variant_value(spec.name, vname, value))

                if variant.propagate:
                    clauses.append(
                        f.variant_propagation_candidate(spec.name, vname, value, spec.name)
                    )
                    clauses.append(f.propagate(spec.name, fn.variant_value(vname, value)))

                # Tell the concretizer that this is a possible value for the
                # variant, to account for things like int/str values where we
@@ -1917,9 +1914,12 @@ def _spec_clauses(
        for flag_type, flags in spec.compiler_flags.items():
            for flag in flags:
                clauses.append(f.node_flag(spec.name, flag_type, flag))
                clauses.append(f.node_flag_source(spec.name, flag_type, spec.name))
                if not spec.concrete and flag.propagate is True:
                    clauses.append(f.node_flag_propagate(spec.name, flag_type))
                    clauses.append(
                        f.propagate(
                            spec.name, fn.node_flag(flag_type, flag), fn.edge_types("link", "run")
                        )
                    )

        # dependencies
        if spec.concrete:
@@ -2072,7 +2072,7 @@ def define_ad_hoc_versions_from_specs(
            # best possible, so they're guaranteed to be used preferentially.
            version = s.versions.concrete

            if version is None or any(v == version for v in self.possible_versions[s.name]):
            if version is None or (any((v == version) for v in self.possible_versions[s.name])):
                continue

            if require_checksum and not _is_checksummed_git_version(version):
@@ -2386,9 +2386,16 @@ def concrete_specs(self):
            # - Add OS to possible OS's
            for dep in spec.traverse():
                self.possible_versions[dep.name].add(dep.version)
                self.declared_versions[dep.name].append(
                    DeclaredVersion(version=dep.version, idx=0, origin=Provenance.INSTALLED)
                )
                if isinstance(dep.version, vn.GitVersion):
                    self.declared_versions[dep.name].append(
                        DeclaredVersion(
                            version=dep.version, idx=0, origin=Provenance.INSTALLED_GIT_VERSION
                        )
                    )
                else:
                    self.declared_versions[dep.name].append(
                        DeclaredVersion(version=dep.version, idx=0, origin=Provenance.INSTALLED)
                    )
                self.possible_oses.add(dep.os)

    def define_concrete_input_specs(self, specs, possible):
@@ -2737,9 +2744,7 @@ class _Head:
    node_compiler = fn.attr("node_compiler_set")
    node_compiler_version = fn.attr("node_compiler_version_set")
    node_flag = fn.attr("node_flag_set")
    node_flag_source = fn.attr("node_flag_source")
    node_flag_propagate = fn.attr("node_flag_propagate")
    variant_propagation_candidate = fn.attr("variant_propagation_candidate")
    propagate = fn.attr("propagate")


class _Body:
@@ -2754,9 +2759,7 @@ class _Body:
    node_compiler = fn.attr("node_compiler")
    node_compiler_version = fn.attr("node_compiler_version")
    node_flag = fn.attr("node_flag")
    node_flag_source = fn.attr("node_flag_source")
    node_flag_propagate = fn.attr("node_flag_propagate")
    variant_propagation_candidate = fn.attr("variant_propagation_candidate")
    propagate = fn.attr("propagate")


class ProblemInstanceBuilder:
@@ -3230,6 +3233,39 @@ def requires(self, impose: str, *, when: str):
        self.runtime_conditions.add((imposed_spec, when_spec))
        self.reset()

    def propagate(self, constraint_str: str, *, when: str):
        msg = "the 'propagate' method can be called only with pkg('*')"
        assert self.current_package == "*", msg

        when_spec = spack.spec.Spec(when)
        assert when_spec.name is None, "only anonymous when specs are accepted"

        placeholder = "XXX"
        node_variable = "node(ID, Package)"
        when_spec.name = placeholder

        body_clauses = self._setup.spec_clauses(when_spec, body=True)
        body_str = (
            f" {f',{os.linesep} '.join(str(x) for x in body_clauses)},\n"
            f" not external({node_variable}),\n"
            f" not runtime(Package)"
        ).replace(f'"{placeholder}"', f"{node_variable}")

        constraint_spec = spack.spec.Spec(constraint_str)
        assert constraint_spec.name is None, "only anonymous constraint specs are accepted"

        constraint_spec.name = placeholder
        constraint_clauses = self._setup.spec_clauses(constraint_spec, body=False)
        for clause in constraint_clauses:
            if clause.args[0] == "node_compiler_version_satisfies":
                self._setup.compiler_version_constraints.add(constraint_spec.compiler)
                args = f'"{constraint_spec.compiler.name}", "{constraint_spec.compiler.versions}"'
                head_str = f"propagate({node_variable}, node_compiler_version_satisfies({args}))"
                rule = f"{head_str} :-\n{body_str}.\n\n"
                self.rules.append(rule)

        self.reset()

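The `propagate` method above is essentially a string-templating exercise: it renders the `when` spec into ASP body literals, swaps the placeholder package name for a real node variable, and emits one `propagate/2` rule per compiler constraint. A toy sketch of the same templating idea, with invented clause strings standing in for `spec_clauses` output and an invented `gcc@:12` constraint:

```python
import os

# Invented body literals, standing in for spec_clauses(when_spec, body=True).
body_clauses = ['attr("node", "XXX")', 'attr("variant_value", "XXX", "foo", "bar")']
node_variable = "node(ID, Package)"
placeholder = "XXX"

body_str = (
    f" {f',{os.linesep} '.join(body_clauses)},\n"
    f" not external({node_variable}),\n"
    f" not runtime(Package)"
).replace(f'"{placeholder}"', node_variable)  # "XXX" -> node(ID, Package)

head_str = f'propagate({node_variable}, node_compiler_version_satisfies("gcc", ":12"))'
print(f"{head_str} :-\n{body_str}.\n")
```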
    def consume_facts(self):
        """Consume the facts collected by this object, and emit rules and
        facts for the runtimes.
@@ -3309,6 +3345,8 @@ def hash(self, node, h):
    def node(self, node):
        if node not in self._specs:
            self._specs[node] = spack.spec.Spec(node.pkg)
            for flag_type in spack.spec.FlagMap.valid_compiler_flags():
                self._specs[node].compiler_flags[flag_type] = []

    def _arch(self, node):
        arch = self._specs[node].architecture
@@ -3361,9 +3399,6 @@ def node_flag(self, node, flag_type, flag):
    def node_flag_source(self, node, flag_type, source):
        self._flag_sources[(node, flag_type)].add(source)

    def no_flags(self, node, flag_type):
        self._specs[node].compiler_flags[flag_type] = []

    def external_spec_selected(self, node, idx):
        """This means that the external spec with index idx has been selected for this package."""
        packages_yaml = _external_config_with_implicit_externals(spack.config.CONFIG)
@@ -3456,7 +3491,7 @@ def reorder_flags(self):
                ordered_compiler_flags = list(llnl.util.lang.dedupe(from_compiler + from_sources))
                compiler_flags = spec.compiler_flags.get(flag_type, [])

                msg = "%s does not equal %s" % (set(compiler_flags), set(ordered_compiler_flags))
                msg = f"{set(compiler_flags)} does not equal {set(ordered_compiler_flags)}"
                assert set(compiler_flags) == set(ordered_compiler_flags), msg

                spec.compiler_flags.update({flag_type: ordered_compiler_flags})
@@ -3526,9 +3561,8 @@ def build_specs(self, function_tuples):
            # do not bother calling actions on it except for node_flag_source,
            # since node_flag_source is tracking information not in the spec itself
            spec = self._specs.get(args[0])
            if spec and spec.concrete:
                if name != "node_flag_source":
                    continue
            if spec and spec.concrete and name != "node_flag_source":
                continue

            action(*args)


@@ -29,7 +29,6 @@
:- attr("variant_value", PackageNode, _, _), not attr("node", PackageNode).
:- attr("node_flag_compiler_default", PackageNode), not attr("node", PackageNode).
:- attr("node_flag", PackageNode, _, _), not attr("node", PackageNode).
:- attr("no_flags", PackageNode, _), not attr("node", PackageNode).
:- attr("external_spec_selected", PackageNode, _), not attr("node", PackageNode).
:- attr("depends_on", ParentNode, _, _), not attr("node", ParentNode).
:- attr("depends_on", _, ChildNode, _), not attr("node", ChildNode).
@@ -256,6 +255,7 @@ possible_version_weight(node(ID, Package), Weight)
:- attr("version", node(ID, Package), Version),
   version_weight(node(ID, Package), Weight),
   not pkg_fact(Package, version_declared(Version, Weight, "installed")),
   not pkg_fact(Package, version_declared(Version, Weight, "installed_git_version")),
   not build(node(ID, Package)),
   internal_error("Build version weight used for reused package").

@@ -811,37 +811,6 @@ node_has_variant(node(ID, Package), Variant) :-
  pkg_fact(Package, variant(Variant)),
  attr("node", node(ID, Package)).

% Variant propagation is forwarded to dependencies
attr("variant_propagation_candidate", PackageNode, Variant, Value, Source) :-
  attr("node", PackageNode),
  depends_on(ParentNode, PackageNode),
  attr("variant_value", node(_, Source), Variant, Value),
  attr("variant_propagation_candidate", ParentNode, Variant, _, Source).

% If the node is a candidate, and it has the variant and value,
% then those variant and value should be propagated
attr("variant_propagate", node(ID, Package), Variant, Value, Source) :-
  attr("variant_propagation_candidate", node(ID, Package), Variant, Value, Source),
  node_has_variant(node(ID, Package), Variant),
  pkg_fact(Package, variant_possible_value(Variant, Value)),
  not attr("variant_set", node(ID, Package), Variant).

% Propagate the value, if there is the corresponding attribute
attr("variant_value", PackageNode, Variant, Value) :- attr("variant_propagate", PackageNode, Variant, Value, _).

% If a variant is propagated, we cannot have extraneous values (this is for multi-valued variants)
variant_is_propagated(PackageNode, Variant) :- attr("variant_propagate", PackageNode, Variant, _, _).
:- variant_is_propagated(PackageNode, Variant),
   attr("variant_value", PackageNode, Variant, Value),
   not attr("variant_propagate", PackageNode, Variant, Value, _).

% Cannot receive different values from different sources on the same variant
error(100, "{0} and {1} cannot both propagate variant '{2}' to package {3} with values '{4}' and '{5}'", Source1, Source2, Variant, Package, Value1, Value2) :-
  attr("variant_propagate", node(X, Package), Variant, Value1, Source1),
  attr("variant_propagate", node(X, Package), Variant, Value2, Source2),
  node_has_variant(node(X, Package), Variant),
  Value1 < Value2, Source1 < Source2.

% a variant cannot be set if it is not a variant on the package
error(100, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Variant, Package)
  :- attr("variant_set", node(X, Package), Variant),
@@ -919,7 +888,7 @@ variant_not_default(node(ID, Package), Variant, Value)
  % variants set explicitly on the CLI don't count as non-default
  not attr("variant_set", node(ID, Package), Variant, Value),
  % variant values forced by propagation don't count as non-default
  not attr("variant_propagate", node(ID, Package), Variant, Value, _),
  not propagate(node(ID, Package), variant_value(Variant, Value)),
  % variants set on externals that we could use don't count as non-default
  % this makes spack prefer to use an external over rebuilding with the
  % default configuration
@@ -932,7 +901,7 @@ variant_default_not_used(node(ID, Package), Variant, Value)
  :- variant_default_value(Package, Variant, Value),
     node_has_variant(node(ID, Package), Variant),
     not attr("variant_value", node(ID, Package), Variant, Value),
     not attr("variant_propagate", node(ID, Package), Variant, _, _),
     not propagate(node(ID, Package), variant_value(Variant, _)),
     attr("node", node(ID, Package)).

% The variant is set in an external spec
@@ -989,6 +958,101 @@ pkg_fact(Package, variant_single_value("dev_path"))
#defined variant_default_value/3.
#defined variant_default_value_from_packages_yaml/3.

%-----------------------------------------------------------------------------
% Propagation semantics
%-----------------------------------------------------------------------------

% Propagation roots have a corresponding attr("propagate", ...)
propagate(RootNode, PropagatedAttribute) :- attr("propagate", RootNode, PropagatedAttribute).
propagate(RootNode, PropagatedAttribute, EdgeTypes) :- attr("propagate", RootNode, PropagatedAttribute, EdgeTypes).


% Propagate an attribute along edges to child nodes
propagate(ChildNode, PropagatedAttribute) :-
  propagate(ParentNode, PropagatedAttribute),
  depends_on(ParentNode, ChildNode).

propagate(ChildNode, PropagatedAttribute, edge_types(DepType1, DepType2)) :-
  propagate(ParentNode, PropagatedAttribute, edge_types(DepType1, DepType2)),
  depends_on(ParentNode, ChildNode),
  1 { attr("depends_on", ParentNode, ChildNode, DepType1); attr("depends_on", ParentNode, ChildNode, DepType2) }.

%-----------------------------------------------------------------------------
% Activation of propagated values
%-----------------------------------------------------------------------------

%----
% Variants
%----

% If a variant is propagated, and can be accepted, set its value
attr("variant_value", node(ID, Package), Variant, Value) :-
  propagate(node(ID, Package), variant_value(Variant, Value)),
  node_has_variant(node(ID, Package), Variant),
  pkg_fact(Package, variant_possible_value(Variant, Value)),
  not attr("variant_set", node(ID, Package), Variant).

% If a variant is propagated, we cannot have extraneous values
variant_is_propagated(PackageNode, Variant) :-
  attr("variant_value", PackageNode, Variant, Value),
  propagate(PackageNode, variant_value(Variant, Value)),
  not attr("variant_set", PackageNode, Variant).

:- variant_is_propagated(PackageNode, Variant),
   attr("variant_value", PackageNode, Variant, Value),
   not propagate(PackageNode, variant_value(Variant, Value)).

%----
% Flags
%----

% A propagated flag implies:
% 1. The same flag type is not set on this node
% 2. This node has the same compiler as the propagation source

propagated_flag(node(PackageID, Package), node_flag(FlagType, Flag), SourceNode) :-
  propagate(node(PackageID, Package), node_flag(FlagType, Flag), _),
  not attr("node_flag_set", node(PackageID, Package), FlagType, _),
  % Same compiler as propagation source
  node_compiler(node(PackageID, Package), CompilerID),
  node_compiler(SourceNode, CompilerID),
  attr("propagate", SourceNode, node_flag(FlagType, Flag), _),
  node(PackageID, Package) != SourceNode,
  not runtime(Package).

attr("node_flag", PackageNode, FlagType, Flag) :- propagated_flag(PackageNode, node_flag(FlagType, Flag), _).
attr("node_flag_source", PackageNode, FlagType, SourceNode) :- propagated_flag(PackageNode, node_flag(FlagType, _), SourceNode).

% Cannot propagate the same flag from two distinct sources
error(100, "{0} and {1} cannot both propagate compiler flags '{2}' to {3}", Source1, Source2, Package, FlagType) :-
  propagated_flag(node(ID, Package), node_flag(FlagType, _), node(_, Source1)),
  propagated_flag(node(ID, Package), node_flag(FlagType, _), node(_, Source2)),
  Source1 < Source2.

%----
% Compiler constraints
%----

attr("node_compiler_version_satisfies", node(ID, Package), Compiler, Version) :-
  propagate(node(ID, Package), node_compiler_version_satisfies(Compiler, Version)),
  node_compiler(node(ID, Package), CompilerID),
  compiler_name(CompilerID, Compiler),
  not runtime(Package),
  not external(Package).

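The new `propagate/2` and `propagate/3` rules above compute a transitive closure over `depends_on` edges, with the three-argument form gated on the edge carrying at least one of two dependency types. A small Python sketch of the same fixpoint, on an invented DAG:

```python
from collections import deque

# Invented DAG: parent -> [(child, edge_types_on_that_edge)]
dag = {
    "root": [("libfoo", {"link"}), ("cmake", {"build"})],
    "libfoo": [("zlib", {"link", "run"})],
    "cmake": [],
    "zlib": [],
}

def propagate(root, attribute, edge_types=frozenset({"link", "run"})):
    """Yield (node, attribute) for every node reachable via allowed edge types."""
    seen, queue = {root}, deque([root])
    while queue:
        parent = queue.popleft()
        yield parent, attribute
        for child, etypes in dag[parent]:
            # Mirrors the ASP cardinality constraint: at least one of the
            # allowed edge types must be present on the edge.
            if child not in seen and etypes & edge_types:
                seen.add(child)
                queue.append(child)

print(list(propagate("root", 'node_flag("cflags", "-O2")')))
# cmake is skipped: its incoming edge is build-only, not link/run.
```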
%-----------------------------------------------------------------------------
% Runtimes
%-----------------------------------------------------------------------------

% Check whether the DAG has any built package
has_built_packages() :- build(X), not external(X).

% If we build packages, the runtime nodes must use an available compiler
1 { node_compiler(PackageNode, CompilerID) : build(PackageNode), not external(PackageNode) } :-
  has_built_packages(),
  runtime(RuntimePackage),
  node_compiler(node(_, RuntimePackage), CompilerID).

%-----------------------------------------------------------------------------
% Platform semantics
%-----------------------------------------------------------------------------
@@ -1090,10 +1154,15 @@ attr("node_target", PackageNode, Target)
  :- attr("node", PackageNode), attr("node_target_set", PackageNode, Target).

% each node has the weight of its assigned target
node_target_weight(node(ID, Package), Weight)
  :- attr("node", node(ID, Package)),
     attr("node_target", node(ID, Package), Target),
     target_weight(Target, Weight).
target_weight(Target, 0)
  :- attr("node", PackageNode),
     attr("node_target", PackageNode, Target),
     attr("node_target_set", PackageNode, Target).

node_target_weight(PackageNode, MinWeight)
  :- attr("node", PackageNode),
     attr("node_target", PackageNode, Target),
     MinWeight = #min { Weight : target_weight(Target, Weight) }.

% compatibility rules for targets among nodes
node_target_match(ParentNode, DependencyNode)
@@ -1155,12 +1224,12 @@ error(10, "No valid compiler for {0} satisfies '%{1}'", Package, Compiler)

% If the compiler of a node must satisfy a constraint, then its version
% must be chosen among the ones that satisfy said constraint
error(100, "No valid version for '{0}' compiler '{1}' satisfies '@{2}'", Package, Compiler, Constraint)
error(100, "Package {0} cannot satisfy '%{1}@{2}'", Package, Compiler, Constraint)
  :- attr("node", node(X, Package)),
     attr("node_compiler_version_satisfies", node(X, Package), Compiler, Constraint),
     not compiler_version_satisfies(Compiler, Constraint, _).
     not compiler_version_satisfies(Compiler, Constraint, _).

error(100, "No valid version for '{0}' compiler '{1}' satisfies '@{2}'", Package, Compiler, Constraint)
error(100, "Package {0} cannot satisfy '%{1}@{2}'", Package, Compiler, Constraint)
  :- attr("node", node(X, Package)),
     attr("node_compiler_version_satisfies", node(X, Package), Compiler, Constraint),
     not compiler_version_satisfies(Compiler, Constraint, ID),
@@ -1241,45 +1310,9 @@ error(100, "Compiler {1}@{2} requested for {0} cannot be found. Set install_miss
% Compiler flags
%-----------------------------------------------------------------------------

% propagate flags when compilers match
can_inherit_flags(PackageNode, DependencyNode, FlagType)
  :- same_compiler(PackageNode, DependencyNode),
     not attr("node_flag_set", DependencyNode, FlagType, _),
     flag_type(FlagType).

same_compiler(PackageNode, DependencyNode)
  :- depends_on(PackageNode, DependencyNode),
     node_compiler(PackageNode, CompilerID),
     node_compiler(DependencyNode, CompilerID),
     compiler_id(CompilerID).

node_flag_inherited(DependencyNode, FlagType, Flag)
  :- attr("node_flag_set", PackageNode, FlagType, Flag),
     can_inherit_flags(PackageNode, DependencyNode, FlagType),
     attr("node_flag_propagate", PackageNode, FlagType).

% Ensure propagation
:- node_flag_inherited(PackageNode, FlagType, Flag),
   can_inherit_flags(PackageNode, DependencyNode, FlagType),
   attr("node_flag_propagate", PackageNode, FlagType).

error(100, "{0} and {1} cannot both propagate compiler flags '{2}' to {3}", Source1, Source2, Package, FlagType) :-
  depends_on(Source1, Package),
  depends_on(Source2, Package),
  attr("node_flag_propagate", Source1, FlagType),
  attr("node_flag_propagate", Source2, FlagType),
  can_inherit_flags(Source1, Package, FlagType),
  can_inherit_flags(Source2, Package, FlagType),
  Source1 < Source2.

% remember where flags came from
attr("node_flag_source", PackageNode, FlagType, PackageNode)
  :- attr("node_flag_set", PackageNode, FlagType, _).

attr("node_flag_source", DependencyNode, FlagType, Q)
  :- attr("node_flag_source", PackageNode, FlagType, Q),
     node_flag_inherited(DependencyNode, FlagType, _),
     attr("node_flag_propagate", PackageNode, FlagType).
attr("node_flag_source", PackageNode, FlagType, PackageNode) :- attr("node_flag_set", PackageNode, FlagType, _).
attr("node_flag_source", PackageNode, FlagType, PackageNode) :- attr("node_flag", PackageNode, FlagType, _), attr("hash", PackageNode, _).

% compiler flags from compilers.yaml are put on nodes if compiler matches
attr("node_flag", PackageNode, FlagType, Flag)
@@ -1299,15 +1332,8 @@ attr("node_flag_compiler_default", PackageNode)
  compiler_name(CompilerID, CompilerName),
  compiler_version(CompilerID, Version).

% if a flag is set to something or inherited, it's included
% Flag set to something
attr("node_flag", PackageNode, FlagType, Flag) :- attr("node_flag_set", PackageNode, FlagType, Flag).
attr("node_flag", PackageNode, FlagType, Flag) :- node_flag_inherited(PackageNode, FlagType, Flag).

% if no node flags are set for a type, there are no flags.
attr("no_flags", PackageNode, FlagType)
  :- not attr("node_flag", PackageNode, FlagType, _),
     attr("node", PackageNode),
     flag_type(FlagType).

#defined compiler_flag/3.

@@ -1496,7 +1522,7 @@ opt_criterion(45, "preferred providers (non-roots)").
}.

% Try to minimize the number of compiler mismatches in the DAG.
opt_criterion(40, "compiler mismatches that are not from CLI").
opt_criterion(40, "compiler mismatches that are not required").
#minimize{ 0@240: #true }.
#minimize{ 0@40: #true }.
#minimize{
@@ -1506,7 +1532,7 @@ opt_criterion(40, "compiler mismatches that are not from CLI").
  not runtime(Dependency)
}.

opt_criterion(39, "compiler mismatches that are not from CLI").
opt_criterion(39, "compiler mismatches that are required").
#minimize{ 0@239: #true }.
#minimize{ 0@39: #true }.
#minimize{

@@ -4,21 +4,35 @@
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

%=============================================================================
% Heuristic to speed-up solves (node with ID 0)
% Heuristic to speed-up solves
%=============================================================================

% No duplicates by default (most of them will be true)
#heuristic attr("node", node(PackageID, Package)). [100, init]
#heuristic attr("node", node(PackageID, Package)). [ 2, factor]
#heuristic attr("virtual_node", node(VirtualID, Virtual)). [100, init]
#heuristic attr("node", node(1..X-1, Package)) : max_dupes(Package, X), not virtual(Package), X > 1. [-1, sign]
#heuristic attr("virtual_node", node(1..X-1, Package)) : max_dupes(Package, X), virtual(Package) , X > 1. [-1, sign]

%-----------------
% Domain heuristic
%-----------------
% Pick preferred version
#heuristic attr("version", node(PackageID, Package), Version) : pkg_fact(Package, version_declared(Version, Weight)), attr("node", node(PackageID, Package)). [40, init]
#heuristic version_weight(node(PackageID, Package), 0) : pkg_fact(Package, version_declared(Version, 0 )), attr("node", node(PackageID, Package)). [ 1, sign]
#heuristic attr("version", node(PackageID, Package), Version) : pkg_fact(Package, version_declared(Version, 0 )), attr("node", node(PackageID, Package)). [ 1, sign]
#heuristic attr("version", node(PackageID, Package), Version) : pkg_fact(Package, version_declared(Version, Weight)), attr("node", node(PackageID, Package)), Weight > 0. [-1, sign]

% Root node
#heuristic attr("version", node(0, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true]
#heuristic version_weight(node(0, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true]
#heuristic attr("variant_value", node(0, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("root", node(0, Package)). [35, true]
#heuristic attr("node_target", node(0, Package), Target) : target_weight(Target, 0), attr("root", node(0, Package)). [35, true]
#heuristic node_target_weight(node(0, Package), 0) : attr("root", node(0, Package)). [35, true]
#heuristic node_compiler(node(0, Package), CompilerID) : compiler_weight(ID, 0), compiler_id(ID), attr("root", node(0, Package)). [35, true]
% Use default variants
#heuristic attr("variant_value", node(PackageID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(PackageID, Package)). [40, true]
#heuristic attr("variant_value", node(PackageID, Package), Variant, Value) : not variant_default_value(Package, Variant, Value), attr("node", node(PackageID, Package)). [40, false]

% Providers
#heuristic attr("node", node(0, Package)) : default_provider_preference(Virtual, Package, 0), possible_in_link_run(Package). [30, true]
% Use default operating system and platform
#heuristic attr("node_os", node(PackageID, Package), OS) : os(OS, 0), attr("root", node(PackageID, Package)). [40, true]
#heuristic attr("node_platform", node(PackageID, Package), Platform) : allowed_platform(Platform), attr("root", node(PackageID, Package)). [40, true]

% Use default targets
#heuristic attr("node_target", node(PackageID, Package), Target) : target_weight(Target, Weight), attr("node", node(PackageID, Package)). [30, init]
#heuristic attr("node_target", node(PackageID, Package), Target) : target_weight(Target, Weight), attr("node", node(PackageID, Package)). [ 2, factor]
#heuristic attr("node_target", node(PackageID, Package), Target) : target_weight(Target, 0), attr("node", node(PackageID, Package)). [ 1, sign]
#heuristic attr("node_target", node(PackageID, Package), Target) : target_weight(Target, Weight), attr("node", node(PackageID, Package)), Weight > 0. [-1, sign]

% Use the default compilers
#heuristic node_compiler(node(PackageID, Package), ID) : compiler_weight(ID, 0), compiler_id(ID), attr("node", node(PackageID, Package)). [30, init]

@@ -1,24 +0,0 @@
% Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

%=============================================================================
% Heuristic to speed-up solves (node with ID > 0)
%=============================================================================

% node(ID, _)
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic version_weight(node(ID, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : compiler_weight(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]

% node(ID, _), split build dependencies
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic version_weight(node(ID, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : compiler_weight(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
@@ -18,9 +18,6 @@ error(100, "Cannot reuse {0} since we cannot determine libc compatibility", Reus
  ReusedPackage != LibcPackage,
  not attr("compatible_libc", node(R, ReusedPackage), LibcPackage, LibcVersion).

% Check whether the DAG has any built package
has_built_packages() :- build(X), not external(X).

% A libc is needed in the DAG
:- has_built_packages(), not provider(_, node(0, "libc")).


@@ -1287,6 +1287,102 @@ def copy(self, *args, **kwargs):
        return self.wrapped_obj.copy(*args, **kwargs)


def tree(
    specs: List["spack.spec.Spec"],
    *,
    color: Optional[bool] = None,
    depth: bool = False,
    hashes: bool = False,
    hashlen: Optional[int] = None,
    cover: str = "nodes",
    indent: int = 0,
    format: str = DEFAULT_FORMAT,
    deptypes: Union[Tuple[str, ...], str] = "all",
    show_types: bool = False,
    depth_first: bool = False,
    recurse_dependencies: bool = True,
    status_fn: Optional[Callable[["Spec"], InstallStatus]] = None,
    prefix: Optional[Callable[["Spec"], str]] = None,
    key=id,
) -> str:
    """Prints out specs and their dependencies, tree-formatted with indentation.

    The status function may output either a boolean or an InstallStatus.

    Args:
        color: if True, always colorize the tree. If False, don't colorize the tree. If None,
            use the default from llnl.tty.color
        depth: print the depth from the root
        hashes: if True, print the hash of each node
        hashlen: length of the hash to be printed
        cover: either "nodes" or "edges"
        indent: extra indentation for the tree being printed
        format: format to be used to print each node
        deptypes: dependency types to be represented in the tree
        show_types: if True, show the (merged) dependency type of a node
        depth_first: if True, traverse the DAG depth first when representing it as a tree
        recurse_dependencies: if True, recurse on dependencies
        status_fn: optional callable that takes a node as an argument and returns its
            installation status
        prefix: optional callable that takes a node as an argument and returns its
            installation prefix
    """
    out = ""

    if color is None:
        color = clr.get_color_when()

    for d, dep_spec in traverse.traverse_tree(
        sorted(specs), cover=cover, deptype=deptypes, depth_first=depth_first, key=key
    ):
        node = dep_spec.spec

        if prefix is not None:
            out += prefix(node)
        out += " " * indent

        if depth:
            out += "%-4d" % d

        if status_fn:
            status = status_fn(node)
            if status in list(InstallStatus):
                out += clr.colorize(status.value, color=color)
            elif status:
                out += clr.colorize("@g{[+]} ", color=color)
            else:
                out += clr.colorize("@r{[-]} ", color=color)

        if hashes:
            out += clr.colorize("@K{%s} ", color=color) % node.dag_hash(hashlen)

        if show_types:
            if cover == "nodes":
                # when only covering nodes, we merge dependency types
                # from all dependents before showing them.
                depflag = 0
                for ds in node.edges_from_dependents():
                    depflag |= ds.depflag
            else:
                # when covering edges or paths, we show dependency
                # types only for the edge through which we visited
                depflag = dep_spec.depflag

            type_chars = dt.flag_to_chars(depflag)
            out += "[%s] " % type_chars

        out += " " * d
        if d > 0:
            out += "^"
        out += node.format(format, color=color) + "\n"

        # Check if we wanted just the first line
        if not recurse_dependencies:
            break

    return out


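Since `Spec.tree` now delegates to this module-level function, multiple root specs can be rendered in a single call. A hedged usage sketch (package names invented, and assuming the specs have been concretized so DAG hashes exist):

```python
import spack.spec

# Invented example specs; in practice these would come from an environment
# or from parsed and concretized command-line specs.
specs = [spack.spec.Spec("zlib"), spack.spec.Spec("pkg-a")]

# Render both trees in one string, with depths and 7-character hashes.
output = spack.spec.tree(specs, depth=True, hashes=True, hashlen=7, color=False)
print(output)
```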
@lang.lazy_lexicographic_ordering(set_hash=False)
class Spec:
    #: Cache for spec's prefix, computed lazily in the corresponding property
@@ -1454,9 +1550,7 @@ def _get_dependency(self, name):
            raise spack.error.SpecError(err_msg.format(name, len(deps)))
        return deps[0]

    def edges_from_dependents(
        self, name=None, depflag: dt.DepFlag = dt.ALL
    ) -> List[DependencySpec]:
    def edges_from_dependents(self, name=None, depflag: dt.DepFlag = dt.ALL):
        """Return a list of edges connecting this node in the DAG
        to parents.

@@ -1466,9 +1560,7 @@ def edges_from_dependents(
        """
        return [d for d in self._dependents.select(parent=name, depflag=depflag)]

    def edges_to_dependencies(
        self, name=None, depflag: dt.DepFlag = dt.ALL
    ) -> List[DependencySpec]:
    def edges_to_dependencies(self, name=None, depflag: dt.DepFlag = dt.ALL):
        """Return a list of edges connecting this node in the DAG
        to children.

@@ -4608,13 +4700,14 @@ def tree(
        recurse_dependencies: bool = True,
        status_fn: Optional[Callable[["Spec"], InstallStatus]] = None,
        prefix: Optional[Callable[["Spec"], str]] = None,
        key=id,
    ) -> str:
        """Prints out this spec and its dependencies, tree-formatted
        with indentation.
        """Prints out this spec and its dependencies, tree-formatted with indentation.

        The status function may output either a boolean or an InstallStatus.
        See the multi-spec ``spack.spec.tree()`` function for details.

        Args:
            specs: List of specs to format.
            color: if True, always colorize the tree. If False, don't colorize the tree. If None,
                use the default from llnl.tty.color
            depth: print the depth from the root
@@ -4632,60 +4725,23 @@ def tree(
            prefix: optional callable that takes a node as an argument and returns its
                installation prefix
        """
        out = ""

        if color is None:
            color = clr.get_color_when()

        for d, dep_spec in traverse.traverse_tree(
            [self], cover=cover, deptype=deptypes, depth_first=depth_first
        ):
            node = dep_spec.spec

            if prefix is not None:
                out += prefix(node)
            out += " " * indent

            if depth:
                out += "%-4d" % d

            if status_fn:
                status = status_fn(node)
                if status in list(InstallStatus):
                    out += clr.colorize(status.value, color=color)
                elif status:
                    out += clr.colorize("@g{[+]} ", color=color)
                else:
                    out += clr.colorize("@r{[-]} ", color=color)

            if hashes:
                out += clr.colorize("@K{%s} ", color=color) % node.dag_hash(hashlen)

            if show_types:
                if cover == "nodes":
                    # when only covering nodes, we merge dependency types
                    # from all dependents before showing them.
                    depflag = 0
                    for ds in node.edges_from_dependents():
                        depflag |= ds.depflag
                else:
                    # when covering edges or paths, we show dependency
                    # types only for the edge through which we visited
                    depflag = dep_spec.depflag

                type_chars = dt.flag_to_chars(depflag)
                out += "[%s] " % type_chars

            out += " " * d
            if d > 0:
                out += "^"
            out += node.format(format, color=color) + "\n"

            # Check if we wanted just the first line
            if not recurse_dependencies:
                break

        return out
        return tree(
            [self],
            color=color,
            depth=depth,
            hashes=hashes,
            hashlen=hashlen,
            cover=cover,
            indent=indent,
            format=format,
            deptypes=deptypes,
            show_types=show_types,
            depth_first=depth_first,
            recurse_dependencies=recurse_dependencies,
            status_fn=status_fn,
            prefix=prefix,
            key=key,
        )

    def __repr__(self):
        return str(self)

@@ -212,10 +212,7 @@ def _expand_matrix_constraints(matrix_config):
    results = []
    for combo in itertools.product(*expanded_rows):
        # Construct a combined spec to test against excludes
        flat_combo = [constraint for constraint_list in combo for constraint in constraint_list]

        # Resolve abstract hashes so we can exclude by their concrete properties
        flat_combo = [Spec(x).lookup_hash() for x in flat_combo]
        flat_combo = [Spec(constraint) for constraints in combo for constraint in constraints]

        test_spec = flat_combo[0].copy()
        for constraint in flat_combo[1:]:
@@ -231,7 +228,9 @@ def _expand_matrix_constraints(matrix_config):
            spack.variant.substitute_abstract_variants(test_spec)
        except spack.variant.UnknownVariantError:
            pass
        if any(test_spec.satisfies(x) for x in excludes):

        # Resolve abstract hashes for exclusion criteria
        if any(test_spec.lookup_hash().satisfies(x) for x in excludes):
            continue

        if sigil:

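The `_expand_matrix_constraints` hunk above defers the abstract-hash lookup from every matrix entry to the exclusion test only, so hashes are resolved once per combined spec rather than once per constraint. A schematic sketch of the cost difference, with a hypothetical `lookup_hash` call counter standing in for the real `Spec.lookup_hash()`:

```python
calls = 0

def lookup_hash(spec: str) -> str:
    # Hypothetical stand-in for Spec.lookup_hash(), which can be expensive
    # because it has to find the matching concrete spec.
    global calls
    calls += 1
    return spec

combos = [["pkg-a+foo", "pkg-b~bar"], ["pkg-a~foo", "pkg-b+bar"]]

# Old shape: resolve hashes for every constraint in every combination.
for combo in combos:
    _ = [lookup_hash(c) for c in combo]
assert calls == 4

# New shape: resolve once per combined spec, only when testing excludes.
calls = 0
for combo in combos:
    combined = " ".join(combo)
    _ = lookup_hash(combined)
assert calls == 2
```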
@@ -346,8 +346,6 @@ class Stage(LockableStagingDir):
|
||||
similar, and are intended to persist for only one run of spack.
|
||||
"""
|
||||
|
||||
#: Most staging is managed by Spack. DIYStage is one exception.
|
||||
needs_fetching = True
|
||||
requires_patch_success = True
|
||||
|
||||
def __init__(
|
||||
@@ -772,8 +770,6 @@ def __init__(self):
|
||||
"cache_mirror",
|
||||
"steal_source",
|
||||
"disable_mirrors",
|
||||
"needs_fetching",
|
||||
"requires_patch_success",
|
||||
]
|
||||
)
|
||||
|
||||
@@ -812,6 +808,10 @@ def path(self):
|
||||
def archive_file(self):
|
||||
return self[0].archive_file
|
||||
|
||||
@property
|
||||
def requires_patch_success(self):
|
||||
return self[0].requires_patch_success
|
||||
|
||||
@property
|
||||
def keep(self):
|
||||
return self[0].keep
|
||||
@@ -822,64 +822,7 @@ def keep(self, value):
|
||||
item.keep = value
|
||||
|
||||
|
||||
class DIYStage:
|
||||
"""
|
||||
Simple class that allows any directory to be a spack stage. Consequently,
|
||||
it does not expect or require that the source path adhere to the standard
|
||||
directory naming convention.
|
||||
"""
|
||||
|
||||
needs_fetching = False
|
||||
requires_patch_success = False
|
||||
|
||||
def __init__(self, path):
|
||||
if path is None:
|
||||
raise ValueError("Cannot construct DIYStage without a path.")
|
||||
elif not os.path.isdir(path):
|
||||
raise StagePathError("The stage path directory does not exist:", path)
|
||||
|
||||
self.archive_file = None
|
||||
self.path = path
|
||||
self.source_path = path
|
||||
self.created = True
|
||||
|
||||
# DIY stages do nothing as context managers.
|
||||
def __enter__(self):
|
||||
pass
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
pass
|
||||
|
||||
def fetch(self, *args, **kwargs):
|
||||
tty.debug("No need to fetch for DIY.")
|
||||
|
||||
def check(self):
|
||||
tty.debug("No checksum needed for DIY.")
|
||||
|
||||
def expand_archive(self):
|
||||
tty.debug("Using source directory: {0}".format(self.source_path))
|
||||
|
||||
@property
|
||||
def expanded(self):
|
||||
"""Returns True since the source_path must exist."""
|
||||
return True
|
||||
|
||||
def restage(self):
|
||||
raise RestageError("Cannot restage a DIY stage.")
|
||||
|
||||
def create(self):
|
||||
self.created = True
|
||||
|
||||
def destroy(self):
|
||||
# No need to destroy DIY stage.
|
||||
pass
|
||||
|
||||
def cache_local(self):
|
||||
tty.debug("Sources for DIY stages are not cached")
|
||||
|
||||
|
||||
class DevelopStage(LockableStagingDir):
|
||||
needs_fetching = False
|
||||
requires_patch_success = False
|
||||
|
||||
def __init__(self, name, dev_path, reference_link):
|
||||
|
||||
@@ -115,8 +115,8 @@ def default_config(tmpdir, config_directory, monkeypatch, install_mockery_mutabl
|
||||
|
||||
cfg = spack.config.Configuration(
|
||||
*[
|
||||
spack.config.ConfigScope(name, str(mutable_dir))
|
||||
for name in ["site/%s" % platform.system().lower(), "site", "user"]
|
||||
spack.config.DirectoryConfigScope(name, str(mutable_dir))
|
||||
for name in [f"site/{platform.system().lower()}", "site", "user"]
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
@@ -828,14 +828,14 @@ def test_keep_and_replace(wrapper_environment):
|
||||
),
|
||||
(
|
||||
"config:flags:keep_werror:specific",
|
||||
["-Werror", "-Werror=specific", "-bah"],
|
||||
["-Werror=specific", "-bah"],
|
||||
["-Werror", "-Werror=specific", "-Werror-specific2", "-bah"],
|
||||
["-Wno-error", "-Werror=specific", "-Werror-specific2", "-bah"],
|
||||
["-Werror"],
|
||||
),
|
||||
(
|
||||
"config:flags:keep_werror:none",
|
||||
["-Werror", "-Werror=specific", "-bah"],
|
||||
["-bah", "-Wno-error", "-Wno-error=specific"],
|
||||
["-Wno-error", "-Wno-error=specific", "-bah"],
|
||||
["-Werror", "-Werror=specific"],
|
||||
),
|
||||
# check non-standard -Werror opts like -Werror-implicit-function-declaration
|
||||
@@ -848,13 +848,13 @@ def test_keep_and_replace(wrapper_environment):
|
||||
(
|
||||
"config:flags:keep_werror:specific",
|
||||
["-Werror", "-Werror-implicit-function-declaration", "-bah"],
|
||||
["-Werror-implicit-function-declaration", "-bah", "-Wno-error"],
|
||||
["-Wno-error", "-Werror-implicit-function-declaration", "-bah"],
|
||||
["-Werror"],
|
||||
),
|
||||
(
|
||||
"config:flags:keep_werror:none",
|
||||
["-Werror", "-Werror-implicit-function-declaration", "-bah"],
|
||||
["-bah", "-Wno-error=implicit-function-declaration"],
|
||||
["-Wno-error", "-bah", "-Wno-error=implicit-function-declaration"],
|
||||
["-Werror", "-Werror-implicit-function-declaration"],
|
||||
),
|
||||
],
|
||||
|
||||
@@ -2,7 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools
import os
import subprocess

@@ -11,15 +10,12 @@
import llnl.util.filesystem as fs

import spack.ci as ci
import spack.ci_needs_workaround as cinw
import spack.ci_optimization as ci_opt
import spack.config
import spack.environment as ev
import spack.error
import spack.paths as spack_paths
import spack.util.git
import spack.util.gpg
import spack.util.spack_yaml as syaml


@pytest.fixture

@@ -203,164 +199,6 @@ def __call__(self, *args, **kwargs):
assert "Unable to merge {0}".format(c1) in err


@pytest.mark.parametrize("obj, proto", [({}, [])])
def test_ci_opt_argument_checking(obj, proto):
"""Check that matches() and subkeys() return False when `proto` is not a dict."""
assert not ci_opt.matches(obj, proto)
assert not ci_opt.subkeys(obj, proto)


@pytest.mark.parametrize("yaml", [{"extends": 1}])
def test_ci_opt_add_extends_non_sequence(yaml):
"""Check that add_extends() exits if 'extends' is not a sequence."""
yaml_copy = yaml.copy()
ci_opt.add_extends(yaml, None)
assert yaml == yaml_copy


def test_ci_workarounds():
fake_root_spec = "x" * 544
fake_spack_ref = "x" * 40

common_variables = {"SPACK_IS_PR_PIPELINE": "False"}

common_before_script = [
'git clone "https://github.com/spack/spack"',
" && ".join(("pushd ./spack", 'git checkout "{ref}"'.format(ref=fake_spack_ref), "popd")),
'. "./spack/share/spack/setup-env.sh"',
]

def make_build_job(name, deps, stage, use_artifact_buildcache, optimize, use_dependencies):
variables = common_variables.copy()
variables["SPACK_JOB_SPEC_PKG_NAME"] = name

result = {
"stage": stage,
"tags": ["tag-0", "tag-1"],
"artifacts": {
"paths": ["jobs_scratch_dir", "cdash_report", name + ".spec.json", name],
"when": "always",
},
"retry": {"max": 2, "when": ["always"]},
"after_script": ['rm -rf "./spack"'],
"script": ["spack ci rebuild"],
"image": {"name": "spack/centos7", "entrypoint": [""]},
}

if optimize:
result["extends"] = [".c0", ".c1"]
else:
variables["SPACK_ROOT_SPEC"] = fake_root_spec
result["before_script"] = common_before_script

result["variables"] = variables

if use_dependencies:
result["dependencies"] = list(deps) if use_artifact_buildcache else []
else:
result["needs"] = [{"job": dep, "artifacts": use_artifact_buildcache} for dep in deps]

return {name: result}

def make_rebuild_index_job(use_artifact_buildcache, optimize, use_dependencies):
result = {
"stage": "stage-rebuild-index",
"script": "spack buildcache update-index s3://mirror",
"tags": ["tag-0", "tag-1"],
"image": {"name": "spack/centos7", "entrypoint": [""]},
"after_script": ['rm -rf "./spack"'],
}

if optimize:
result["extends"] = ".c0"
else:
result["before_script"] = common_before_script

return {"rebuild-index": result}

def make_factored_jobs(optimize):
return (
{
".c0": {"before_script": common_before_script},
".c1": {"variables": {"SPACK_ROOT_SPEC": fake_root_spec}},
}
if optimize
else {}
)

def make_stage_list(num_build_stages):
return {
"stages": (
["-".join(("stage", str(i))) for i in range(num_build_stages)]
+ ["stage-rebuild-index"]
)
}

def make_yaml_obj(use_artifact_buildcache, optimize, use_dependencies):
result = {}

result.update(
make_build_job(
"pkg-a", [], "stage-0", use_artifact_buildcache, optimize, use_dependencies
)
)

result.update(
make_build_job(
"pkg-b", ["pkg-a"], "stage-1", use_artifact_buildcache, optimize, use_dependencies
)
)

result.update(
make_build_job(
"pkg-c",
["pkg-a", "pkg-b"],
"stage-2",
use_artifact_buildcache,
optimize,
use_dependencies,
)
)

result.update(make_rebuild_index_job(use_artifact_buildcache, optimize, use_dependencies))

result.update(make_factored_jobs(optimize))

result.update(make_stage_list(3))

return result

# test every combination of:
# use artifact buildcache: true or false
# run optimization pass: true or false
# convert needs to dependencies: true or false
for use_ab in (False, True):
original = make_yaml_obj(
use_artifact_buildcache=use_ab, optimize=False, use_dependencies=False
)

for opt, deps in itertools.product(*(((False, True),) * 2)):
# neither optimizing nor converting needs->dependencies
if not (opt or deps):
# therefore, nothing to test
continue

predicted = make_yaml_obj(
use_artifact_buildcache=use_ab, optimize=opt, use_dependencies=deps
)

actual = original.copy()
if opt:
actual = ci_opt.optimizer(actual)
if deps:
actual = cinw.needs_to_dependencies(actual)

predicted = syaml.dump_config(ci_opt.sort_yaml_obj(predicted), default_flow_style=True)
actual = syaml.dump_config(ci_opt.sort_yaml_obj(actual), default_flow_style=True)

assert predicted == actual


def test_get_spec_filter_list(mutable_mock_env_path, config, mutable_mock_repo):
"""Test that given an active environment and list of touched pkgs,
we get the right list of possibly-changed env specs"""

@@ -1432,55 +1432,6 @@ def test_ci_generate_override_runner_attrs(
assert the_elt["after_script"][0] == "post step one"


def test_ci_generate_with_workarounds(
tmpdir, mutable_mock_env_path, install_mockery, mock_packages, monkeypatch, ci_base_environment
):
"""Make sure the post-processing cli workarounds do what they should"""
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
f.write(
"""\
spack:
specs:
- callpath%gcc@=9.5
mirrors:
some-mirror: https://my.fake.mirror
ci:
pipeline-gen:
- submapping:
- match: ['%gcc@9.5']
build-job:
tags:
- donotcare
image: donotcare
enable-artifacts-buildcache: true
"""
)

with tmpdir.as_cwd():
env_cmd("create", "test", "./spack.yaml")
outputfile = str(tmpdir.join(".gitlab-ci.yml"))

with ev.read("test"):
ci_cmd("generate", "--output-file", outputfile, "--dependencies")

with open(outputfile) as f:
contents = f.read()
yaml_contents = syaml.load(contents)

found_one = False
non_rebuild_keys = ["workflow", "stages", "variables", "rebuild-index"]

for ci_key in yaml_contents.keys():
if ci_key not in non_rebuild_keys:
found_one = True
job_obj = yaml_contents[ci_key]
assert "needs" not in job_obj
assert "dependencies" in job_obj

assert found_one is True


@pytest.mark.disable_clean_stage_check
def test_ci_rebuild_index(
tmpdir,

lib/spack/spack/test/cmd/edit.py (new file, 46 lines)
@@ -0,0 +1,46 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

import spack.paths
import spack.repo
import spack.util.editor
from spack.build_systems import autotools, cmake
from spack.main import SpackCommand

edit = SpackCommand("edit")


def test_edit_packages(monkeypatch, mock_packages: spack.repo.RepoPath):
"""Test spack edit a b"""
path_a = mock_packages.filename_for_package_name("a")
path_b = mock_packages.filename_for_package_name("b")
called = False

def editor(*args: str, **kwargs):
nonlocal called
called = True
assert args[0] == path_a
assert args[1] == path_b

monkeypatch.setattr(spack.util.editor, "editor", editor)
edit("a", "b")
assert called


def test_edit_files(monkeypatch):
"""Test spack edit --build-system autotools cmake"""
called = False

def editor(*args: str, **kwargs):
nonlocal called
called = True
assert os.path.samefile(args[0], autotools.__file__)
assert os.path.samefile(args[1], cmake.__file__)

monkeypatch.setattr(spack.util.editor, "editor", editor)
edit("--build-system", "autotools", "cmake")
assert called
@@ -434,7 +434,7 @@ def test_find_loaded(database, working_env):
output = find("--loaded", "--group")
assert output == ""

os.environ[uenv.spack_loaded_hashes_var] = ":".join(
os.environ[uenv.spack_loaded_hashes_var] = os.pathsep.join(
[x.dag_hash() for x in spack.store.STORE.db.query()]
)
output = find("--loaded")

@@ -57,9 +57,9 @@ def test_info_noversion(mock_packages, print_buffer):


@pytest.mark.parametrize(
"pkg_query,expected", [("zlib", "False"), ("gcc", "True (version, variants)")]
"pkg_query,expected", [("zlib", "False"), ("find-externals1", "True (version)")]
)
def test_is_externally_detectable(pkg_query, expected, parser, print_buffer):
def test_is_externally_detectable(mock_packages, pkg_query, expected, parser, print_buffer):
args = parser.parse_args(["--detectable", pkg_query])
spack.cmd.info.info(parser, args)

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import re
import sys

import pytest

@@ -17,101 +18,125 @@
install = SpackCommand("install")
location = SpackCommand("location")

pytestmark = pytest.mark.not_on_windows("does not run on windows")


def test_manpath_trailing_colon(
install_mockery, mock_fetch, mock_archive, mock_packages, working_env
):
(shell, set_command, commandsep) = (
("--bat", 'set "%s=%s"', "\n")
if sys.platform == "win32"
else ("--sh", "export %s=%s", ";")
)

"""Test that the commands generated by load add the MANPATH prefix
inspections. Also test that Spack correctly preserves the default/existing
manpath search path via a trailing colon"""
install("mpileaks")

sh_out = load("--sh", "mpileaks")
lines = sh_out.split("\n")
assert any(re.match(r"export MANPATH=.*:;", ln) for ln in lines)
sh_out = load(shell, "mpileaks")
lines = [line.strip("\n") for line in sh_out.split(commandsep)]
assert any(re.match(set_command % ("MANPATH", ".*" + os.pathsep), ln) for ln in lines)
os.environ["MANPATH"] = "/tmp/man" + os.pathsep

os.environ["MANPATH"] = "/tmp/man:"

sh_out = load("--sh", "mpileaks")
lines = sh_out.split("\n")
assert any(re.match(r"export MANPATH=.*:/tmp/man:;", ln) for ln in lines)
sh_out = load(shell, "mpileaks")
lines = [line.strip("\n") for line in sh_out.split(commandsep)]
assert any(
re.match(set_command % ("MANPATH", ".*" + os.pathsep + "/tmp/man" + os.pathsep), ln)
for ln in lines
)


def test_load_recursive(install_mockery, mock_fetch, mock_archive, mock_packages, working_env):
"""Test that `spack load` applies prefix inspections of its required runtime deps in
topo-order"""
install("mpileaks")
mpileaks_spec = spack.spec.Spec("mpileaks").concretized()
def test_load_shell(shell, set_command):
"""Test that `spack load` applies prefix inspections of its required runtime deps in
topo-order"""
install("mpileaks")
mpileaks_spec = spack.spec.Spec("mpileaks").concretized()

# Ensure our reference variable is cleed.
os.environ["CMAKE_PREFIX_PATH"] = "/hello:/world"
# Ensure our reference variable is clean.
os.environ["CMAKE_PREFIX_PATH"] = "/hello" + os.pathsep + "/world"

sh_out = load("--sh", "mpileaks")
csh_out = load("--csh", "mpileaks")
shell_out = load(shell, "mpileaks")

def extract_cmake_prefix_path(output, prefix):
return next(cmd for cmd in output.split(";") if cmd.startswith(prefix))[
len(prefix) :
].split(":")
def extract_value(output, variable):
match = re.search(set_command % variable, output, flags=re.MULTILINE)
value = match.group(1)
return value.split(os.pathsep)

# Map a prefix found in CMAKE_PREFIX_PATH back to a package name in mpileaks' DAG.
prefix_to_pkg = lambda prefix: next(
s.name for s in mpileaks_spec.traverse() if s.prefix == prefix
)
# Map a prefix found in CMAKE_PREFIX_PATH back to a package name in mpileaks' DAG.
prefix_to_pkg = lambda prefix: next(
s.name for s in mpileaks_spec.traverse() if s.prefix == prefix
)

paths_sh = extract_cmake_prefix_path(sh_out, prefix="export CMAKE_PREFIX_PATH=")
paths_csh = extract_cmake_prefix_path(csh_out, prefix="setenv CMAKE_PREFIX_PATH ")
paths_shell = extract_value(shell_out, "CMAKE_PREFIX_PATH")

# Shouldn't be a difference between loading csh / sh, so check they're the same.
assert paths_sh == paths_csh
# We should've prepended new paths, and keep old ones.
assert paths_shell[-2:] == ["/hello", "/world"]

# We should've prepended new paths, and keep old ones.
assert paths_sh[-2:] == ["/hello", "/world"]
# All but the last two paths are added by spack load; lookup what packages they're from.
pkgs = [prefix_to_pkg(p) for p in paths_shell[:-2]]

# All but the last two paths are added by spack load; lookup what packages they're from.
pkgs = [prefix_to_pkg(p) for p in paths_sh[:-2]]
# Do we have all the runtime packages?
assert set(pkgs) == set(
s.name for s in mpileaks_spec.traverse(deptype=("link", "run"), root=True)
)

# Do we have all the runtime packages?
assert set(pkgs) == set(
s.name for s in mpileaks_spec.traverse(deptype=("link", "run"), root=True)
)
# Finally, do we list them in topo order?
for i, pkg in enumerate(pkgs):
set(s.name for s in mpileaks_spec[pkg].traverse(direction="parents")) in set(pkgs[:i])

# Finally, do we list them in topo order?
for i, pkg in enumerate(pkgs):
set(s.name for s in mpileaks_spec[pkg].traverse(direction="parents")) in set(pkgs[:i])
# Lastly, do we keep track that mpileaks was loaded?
assert (
extract_value(shell_out, uenv.spack_loaded_hashes_var)[0] == mpileaks_spec.dag_hash()
)
return paths_shell

# Lastly, do we keep track that mpileaks was loaded?
assert f"export {uenv.spack_loaded_hashes_var}={mpileaks_spec.dag_hash()}" in sh_out
assert f"setenv {uenv.spack_loaded_hashes_var} {mpileaks_spec.dag_hash()}" in csh_out
if sys.platform == "win32":
shell, set_command = ("--bat", r'set "%s=(.*)"')
test_load_shell(shell, set_command)
else:
params = [("--sh", r"export %s=([^;]*)"), ("--csh", r"setenv %s ([^;]*)")]
shell, set_command = params[0]
paths_sh = test_load_shell(shell, set_command)
shell, set_command = params[1]
paths_csh = test_load_shell(shell, set_command)
assert paths_sh == paths_csh


def test_load_includes_run_env(install_mockery, mock_fetch, mock_archive, mock_packages):
@pytest.mark.parametrize(
"shell,set_command",
(
[("--bat", 'set "%s=%s"')]
if sys.platform == "win32"
else [("--sh", "export %s=%s"), ("--csh", "setenv %s %s")]
),
)
def test_load_includes_run_env(
shell, set_command, install_mockery, mock_fetch, mock_archive, mock_packages
):
"""Tests that environment changes from the package's
`setup_run_environment` method are added to the user environment in
addition to the prefix inspections"""
install("mpileaks")

sh_out = load("--sh", "mpileaks")
csh_out = load("--csh", "mpileaks")
shell_out = load(shell, "mpileaks")

assert "export FOOBAR=mpileaks" in sh_out
assert "setenv FOOBAR mpileaks" in csh_out
assert set_command % ("FOOBAR", "mpileaks") in shell_out


def test_load_first(install_mockery, mock_fetch, mock_archive, mock_packages):
"""Test with and without the --first option"""
shell = "--bat" if sys.platform == "win32" else "--sh"
install("libelf@0.8.12")
install("libelf@0.8.13")

# Now there are two versions of libelf, which should cause an error
out = load("--sh", "libelf", fail_on_error=False)
out = load(shell, "libelf", fail_on_error=False)
assert "matches multiple packages" in out
assert "Use a more specific spec" in out

# Using --first should avoid the error condition
load("--sh", "--first", "libelf")
load(shell, "--first", "libelf")


def test_load_fails_no_shell(install_mockery, mock_fetch, mock_archive, mock_packages):

@@ -122,7 +147,24 @@ def test_load_fails_no_shell(install_mockery, mock_fetch, mock_archive, mock_pac
assert "To set up shell support" in out


def test_unload(install_mockery, mock_fetch, mock_archive, mock_packages, working_env):
@pytest.mark.parametrize(
"shell,set_command,unset_command",
(
[("--bat", 'set "%s=%s"', 'set "%s="')]
if sys.platform == "win32"
else [("--sh", "export %s=%s", "unset %s"), ("--csh", "setenv %s %s", "unsetenv %s")]
),
)
def test_unload(
shell,
set_command,
unset_command,
install_mockery,
mock_fetch,
mock_archive,
mock_packages,
working_env,
):
"""Tests that any variables set in the user environment are undone by the
unload command"""
install("mpileaks")

@@ -130,16 +172,16 @@ def test_unload(install_mockery, mock_fetch, mock_archive, mock_packages, workin

# Set so unload has something to do
os.environ["FOOBAR"] = "mpileaks"
os.environ[uenv.spack_loaded_hashes_var] = "%s:%s" % (mpileaks_spec.dag_hash(), "garbage")
os.environ[uenv.spack_loaded_hashes_var] = ("%s" + os.pathsep + "%s") % (
mpileaks_spec.dag_hash(),
"garbage",
)

sh_out = unload("--sh", "mpileaks")
csh_out = unload("--csh", "mpileaks")
shell_out = unload(shell, "mpileaks")

assert "unset FOOBAR" in sh_out
assert "unsetenv FOOBAR" in csh_out
assert (unset_command % "FOOBAR") in shell_out

assert "export %s=garbage" % uenv.spack_loaded_hashes_var in sh_out
assert "setenv %s garbage" % uenv.spack_loaded_hashes_var in csh_out
assert set_command % (uenv.spack_loaded_hashes_var, "garbage") in shell_out


def test_unload_fails_no_shell(

@@ -13,6 +13,7 @@
import spack.cmd.pkg
import spack.main
import spack.repo
import spack.util.file_cache

#: new fake package template
pkg_template = """\
@@ -34,13 +35,14 @@ def install(self, spec, prefix):

# Force all tests to use a git repository *in* the mock packages repo.
@pytest.fixture(scope="module")
def mock_pkg_git_repo(git, tmpdir_factory):
def mock_pkg_git_repo(git, tmp_path_factory):
"""Copy the builtin.mock repo and make a mutable git repo inside it."""
tmproot = tmpdir_factory.mktemp("mock_pkg_git_repo")
repo_path = tmproot.join("builtin.mock")
root_dir = tmp_path_factory.mktemp("mock_pkg_git_repo")
repo_dir = root_dir / "builtin.mock"
shutil.copytree(spack.paths.mock_packages_path, str(repo_dir))

shutil.copytree(spack.paths.mock_packages_path, str(repo_path))
mock_repo = spack.repo.RepoPath(str(repo_path))
repo_cache = spack.util.file_cache.FileCache(str(root_dir / "cache"))
mock_repo = spack.repo.RepoPath(str(repo_dir), cache=repo_cache)
mock_repo_packages = mock_repo.repos[0].packages_path

with working_dir(mock_repo_packages):

@@ -75,7 +77,7 @@ def mock_pkg_git_repo(git, tmpdir_factory):
git("rm", "-rf", "pkg-c")
git("-c", "commit.gpgsign=false", "commit", "-m", "change pkg-b, remove pkg-c, add pkg-d")

with spack.repo.use_repositories(str(repo_path)):
with spack.repo.use_repositories(str(repo_dir)):
yield mock_repo_packages


@@ -38,7 +38,7 @@ def flake8_package(tmpdir):
change to the ``flake8`` mock package, yields the filename, then undoes the
change on cleanup.
"""
repo = spack.repo.Repo(spack.paths.mock_packages_path)
repo = spack.repo.from_path(spack.paths.mock_packages_path)
filename = repo.filename_for_package_name("flake8")
rel_path = os.path.dirname(os.path.relpath(filename, spack.paths.prefix))
tmp = tmpdir / rel_path / "flake8-ci-package.py"

@@ -54,7 +54,7 @@ def flake8_package(tmpdir):
@pytest.fixture
def flake8_package_with_errors(scope="function"):
"""A flake8 package with errors."""
repo = spack.repo.Repo(spack.paths.mock_packages_path)
repo = spack.repo.from_path(spack.paths.mock_packages_path)
filename = repo.filename_for_package_name("flake8")
tmp = filename + ".tmp"

@@ -130,7 +130,7 @@ def test_changed_files_all_files():
assert os.path.join(spack.paths.module_path, "spec.py") in files

# a mock package
repo = spack.repo.Repo(spack.paths.mock_packages_path)
repo = spack.repo.from_path(spack.paths.mock_packages_path)
filename = repo.filename_for_package_name("flake8")
assert filename in files

@@ -24,6 +24,7 @@
import spack.platforms
import spack.repo
import spack.solver.asp
import spack.util.file_cache
import spack.util.libc
import spack.variant as vt
from spack.concretize import find_spec
@@ -168,19 +169,18 @@ def reverser(pkg_name):


@pytest.fixture()
def repo_with_changing_recipe(tmpdir_factory, mutable_mock_repo):
def repo_with_changing_recipe(tmp_path_factory, mutable_mock_repo):
repo_namespace = "changing"
repo_dir = tmpdir_factory.mktemp(repo_namespace)
repo_dir = tmp_path_factory.mktemp(repo_namespace)

repo_dir.join("repo.yaml").write(
(repo_dir / "repo.yaml").write_text(
"""
repo:
namespace: changing
""",
ensure=True,
"""
)

packages_dir = repo_dir.ensure("packages", dir=True)
packages_dir = repo_dir / "packages"
root_pkg_str = """
class Root(Package):
homepage = "http://www.example.com"

@@ -191,7 +191,9 @@ class Root(Package):

conflicts("^changing~foo")
"""
packages_dir.join("root", "package.py").write(root_pkg_str, ensure=True)
package_py = packages_dir / "root" / "package.py"
package_py.parent.mkdir(parents=True)
package_py.write_text(root_pkg_str)

changing_template = """
class Changing(Package):

@@ -225,7 +227,9 @@ class _ChangingPackage:

def __init__(self, repo_directory):
self.repo_dir = repo_directory
self.repo = spack.repo.Repo(str(repo_directory))
cache_dir = tmp_path_factory.mktemp("cache")
self.repo_cache = spack.util.file_cache.FileCache(str(cache_dir))
self.repo = spack.repo.Repo(str(repo_directory), cache=self.repo_cache)

def change(self, changes=None):
changes = changes or {}

@@ -246,10 +250,12 @@ def change(self, changes=None):
# Change the recipe
t = jinja2.Template(changing_template)
changing_pkg_str = t.render(**context)
packages_dir.join("changing", "package.py").write(changing_pkg_str, ensure=True)
package_py = packages_dir / "changing" / "package.py"
package_py.parent.mkdir(parents=True, exist_ok=True)
package_py.write_text(changing_pkg_str)

# Re-add the repository
self.repo = spack.repo.Repo(str(self.repo_dir))
self.repo = spack.repo.Repo(str(self.repo_dir), cache=self.repo_cache)
repository.put_first(self.repo)

_changing_pkg = _ChangingPackage(repo_dir)

@@ -421,30 +427,38 @@ def test_compiler_flags_differ_identical_compilers(self, mutable_config, clang12
@pytest.mark.only_clingo(
"Optional compiler propagation isn't deprecated for original concretizer"
)
def test_concretize_compiler_flag_propagate(self):
spec = Spec("hypre cflags=='-g' ^openblas")
spec.concretize()

assert spec.satisfies("^openblas cflags='-g'")

@pytest.mark.only_clingo(
"Optional compiler propagation isn't deprecated for original concretizer"
@pytest.mark.parametrize(
"spec_str,expected,not_expected",
[
# Simple flag propagation from the root
("hypre cflags=='-g' ^openblas", ["hypre cflags='-g'", "^openblas cflags='-g'"], []),
(
"hypre cflags='-g' ^openblas",
["hypre cflags='-g'", "^openblas"],
["^openblas cflags='-g'"],
),
# Setting a flag overrides propagation
(
"hypre cflags=='-g' ^openblas cflags='-O3'",
["hypre cflags='-g'", "^openblas cflags='-O3'"],
["^openblas cflags='-g'"],
),
# Propagation doesn't go across build dependencies
(
"cmake-client cflags=='-O2 -g'",
["cmake-client cflags=='-O2 -g'", "^cmake"],
["cmake cflags=='-O2 -g'"],
),
],
)
def test_concretize_compiler_flag_does_not_propagate(self):
spec = Spec("hypre cflags='-g' ^openblas")
spec.concretize()
def test_compiler_flag_propagation(self, spec_str, expected, not_expected):
root = Spec(spec_str).concretized()

assert not spec.satisfies("^openblas cflags='-g'")
for constraint in expected:
assert root.satisfies(constraint)

@pytest.mark.only_clingo(
"Optional compiler propagation isn't deprecated for original concretizer"
)
def test_concretize_propagate_compiler_flag_not_passed_to_dependent(self):
spec = Spec("hypre cflags=='-g' ^openblas cflags='-O3'")
spec.concretize()

assert set(spec.compiler_flags["cflags"]) == set(["-g"])
assert spec.satisfies("^openblas cflags='-O3'")
for constraint in not_expected:
assert not root.satisfies(constraint)

def test_mixing_compilers_only_affects_subdag(self):
spack.config.set("packages:all:compiler", ["clang", "gcc"])

@@ -1767,21 +1781,21 @@ def test_reuse_with_unknown_package_dont_raise(self, tmpdir, temporary_store, mo
assert s.namespace == "builtin.mock"

@pytest.mark.parametrize(
"specs,expected",
"specs,expected,libc_offset",
[
(["libelf", "libelf@0.8.10"], 1),
(["libdwarf%gcc", "libelf%clang"], 2),
(["libdwarf%gcc", "libdwarf%clang"], 3),
(["libdwarf^libelf@0.8.12", "libdwarf^libelf@0.8.13"], 4),
(["hdf5", "zmpi"], 3),
(["hdf5", "mpich"], 2),
(["hdf5^zmpi", "mpich"], 4),
(["mpi", "zmpi"], 2),
(["mpi", "mpich"], 1),
(["libelf", "libelf@0.8.10"], 1, 1),
(["libdwarf%gcc", "libelf%clang"], 2, 1),
(["libdwarf%gcc", "libdwarf%clang"], 3, 2),
(["libdwarf^libelf@0.8.12", "libdwarf^libelf@0.8.13"], 4, 1),
(["hdf5", "zmpi"], 3, 1),
(["hdf5", "mpich"], 2, 1),
(["hdf5^zmpi", "mpich"], 4, 1),
(["mpi", "zmpi"], 2, 1),
(["mpi", "mpich"], 1, 1),
],
)
@pytest.mark.only_clingo("Original concretizer cannot concretize in rounds")
def test_best_effort_coconcretize(self, specs, expected):
def test_best_effort_coconcretize(self, specs, expected, libc_offset):
specs = [Spec(s) for s in specs]
solver = spack.solver.asp.Solver()
solver.reuse = False

@@ -1790,7 +1804,9 @@ def test_best_effort_coconcretize(self, specs, expected):
for s in result.specs:
concrete_specs.update(s.traverse())

libc_offset = 1 if spack.solver.asp.using_libc_compatibility() else 0
if not spack.solver.asp.using_libc_compatibility():
libc_offset = 0

assert len(concrete_specs) == expected + libc_offset

@pytest.mark.parametrize(
@@ -3043,3 +3059,45 @@ def test_spec_filters(specs, include, exclude, expected):
factory=lambda: specs, is_usable=lambda x: True, include=include, exclude=exclude
)
assert f.selected_specs() == expected


@pytest.mark.only_clingo("clingo only reuse feature being tested")
@pytest.mark.regression("38484")
def test_git_ref_version_can_be_reused(
install_mockery_mutable_config, do_not_check_runtimes_on_reuse
):
first_spec = spack.spec.Spec("git-ref-package@git.2.1.5=2.1.5~opt").concretized()
first_spec.package.do_install(fake=True, explicit=True)

with spack.config.override("concretizer:reuse", True):
# reproducer of the issue is that spack will solve when there is a change to the base spec
second_spec = spack.spec.Spec("git-ref-package@git.2.1.5=2.1.5+opt").concretized()
assert second_spec.dag_hash() != first_spec.dag_hash()
# we also want to confirm that reuse actually works so leave variant off to
# let solver reuse
third_spec = spack.spec.Spec("git-ref-package@git.2.1.5=2.1.5")
assert first_spec.satisfies(third_spec)
third_spec.concretize()
assert third_spec.dag_hash() == first_spec.dag_hash()


@pytest.mark.only_clingo("clingo only reuse feature being tested")
@pytest.mark.parametrize("standard_version", ["2.0.0", "2.1.5", "2.1.6"])
def test_reuse_prefers_standard_over_git_versions(
standard_version, install_mockery_mutable_config, do_not_check_runtimes_on_reuse
):
"""
order matters in this test. typically reuse would pick the highest versioned installed match
but we want to prefer the standard version over git ref based versions
so install git ref last and ensure it is not picked up by reuse
"""
standard_spec = spack.spec.Spec(f"git-ref-package@{standard_version}").concretized()
standard_spec.package.do_install(fake=True, explicit=True)

git_spec = spack.spec.Spec("git-ref-package@git.2.1.5=2.1.5").concretized()
git_spec.package.do_install(fake=True, explicit=True)

with spack.config.override("concretizer:reuse", True):
test_spec = spack.spec.Spec("git-ref-package@2").concretized()
assert git_spec.dag_hash() != test_spec.dag_hash()
assert standard_spec.dag_hash() == test_spec.dag_hash()

@@ -79,13 +79,13 @@ def test_external_nodes_do_not_have_runtimes(runtime_repo, mutable_config, tmp_p
[
# The reused runtime is older than we need, thus we'll add a more recent one for a
("a%gcc@10.2.1", "b%gcc@9.4.0", {"a": "gcc-runtime@10.2.1", "b": "gcc-runtime@9.4.0"}, 2),
# The root is compiled with an older compiler, thus we'll reuse the runtime from b
("a%gcc@9.4.0", "b%gcc@10.2.1", {"a": "gcc-runtime@10.2.1", "b": "gcc-runtime@10.2.1"}, 1),
# The root is compiled with an older compiler, thus we'll NOT reuse the runtime from b
("a%gcc@9.4.0", "b%gcc@10.2.1", {"a": "gcc-runtime@9.4.0", "b": "gcc-runtime@9.4.0"}, 1),
# Same as before, but tests that we can reuse from a more generic target
pytest.param(
"a%gcc@9.4.0",
"b%gcc@10.2.1 target=x86_64",
{"a": "gcc-runtime@10.2.1 target=x86_64", "b": "gcc-runtime@10.2.1 target=x86_64"},
{"a": "gcc-runtime@9.4.0", "b": "gcc-runtime@9.4.0"},
1,
marks=pytest.mark.skipif(
str(archspec.cpu.host().family) != "x86_64", reason="test data is x86_64 specific"

@@ -102,13 +102,15 @@ def test_external_nodes_do_not_have_runtimes(runtime_repo, mutable_config, tmp_p
),
],
)
@pytest.mark.regression("44444")
def test_reusing_specs_with_gcc_runtime(root_str, reused_str, expected, nruntime, runtime_repo):
"""Tests that we can reuse specs with a "gcc-runtime" leaf node. In particular, checks
that the semantic for gcc-runtimes versions accounts for reused packages too.

Reusable runtime versions should be lower, or equal, to that of parent nodes.
"""
root, reused_spec = _concretize_with_reuse(root_str=root_str, reused_str=reused_str)

assert f"{expected['b']}" in reused_spec
runtime_a = root.dependencies("gcc-runtime")[0]
assert runtime_a.satisfies(expected["a"])
runtime_b = root["b"].dependencies("gcc-runtime")[0]

@@ -123,8 +125,7 @@ def test_reusing_specs_with_gcc_runtime(root_str, reused_str, expected, nruntime
[
# Ensure that, whether we have multiple runtimes in the DAG or not,
# we always link only the latest version
("a%gcc@10.2.1", "b%gcc@9.4.0", ["gcc-runtime@10.2.1"], ["gcc-runtime@9.4.0"]),
("a%gcc@9.4.0", "b%gcc@10.2.1", ["gcc-runtime@10.2.1"], ["gcc-runtime@9.4.0"]),
("a%gcc@10.2.1", "b%gcc@9.4.0", ["gcc-runtime@10.2.1"], ["gcc-runtime@9.4.0"])
],
)
def test_views_can_handle_duplicate_runtime_nodes(

@@ -161,21 +161,24 @@ def test_preferred_providers(self):
spec = concretize("mpileaks")
assert "zmpi" in spec

def test_config_set_pkg_property_url(self, mutable_mock_repo):
@pytest.mark.parametrize(
"update,expected",
[
(
{"url": "http://www.somewhereelse.com/mpileaks-1.0.tar.gz"},
"http://www.somewhereelse.com/mpileaks-2.3.tar.gz",
),
({}, "http://www.llnl.gov/mpileaks-2.3.tar.gz"),
],
)
def test_config_set_pkg_property_url(self, update, expected, mock_repo_path):
"""Test setting an existing attribute in the package class"""
update_packages(
"mpileaks",
"package_attributes",
{"url": "http://www.somewhereelse.com/mpileaks-1.0.tar.gz"},
)
spec = concretize("mpileaks")
assert spec.package.fetcher.url == "http://www.somewhereelse.com/mpileaks-2.3.tar.gz"
update_packages("mpileaks", "package_attributes", update)
with spack.repo.use_repositories(mock_repo_path):
spec = concretize("mpileaks")
assert spec.package.fetcher.url == expected

update_packages("mpileaks", "package_attributes", {})
spec = concretize("mpileaks")
assert spec.package.fetcher.url == "http://www.llnl.gov/mpileaks-2.3.tar.gz"

def test_config_set_pkg_property_new(self, mutable_mock_repo):
def test_config_set_pkg_property_new(self, mock_repo_path):
"""Test that you can set arbitrary attributes on the Package class"""
conf = syaml.load_config(
"""\

@@ -194,19 +197,20 @@ def test_config_set_pkg_property_new(self, mutable_mock_repo):
"""
)
spack.config.set("packages", conf, scope="concretize")

spec = concretize("mpileaks")
assert spec.package.v1 == 1
assert spec.package.v2 is True
assert spec.package.v3 == "yesterday"
assert spec.package.v4 == "true"
assert dict(spec.package.v5) == {"x": 1, "y": 2}
assert list(spec.package.v6) == [1, 2]
with spack.repo.use_repositories(mock_repo_path):
spec = concretize("mpileaks")
assert spec.package.v1 == 1
assert spec.package.v2 is True
assert spec.package.v3 == "yesterday"
assert spec.package.v4 == "true"
assert dict(spec.package.v5) == {"x": 1, "y": 2}
assert list(spec.package.v6) == [1, 2]

update_packages("mpileaks", "package_attributes", {})
spec = concretize("mpileaks")
with pytest.raises(AttributeError):
spec.package.v1
with spack.repo.use_repositories(mock_repo_path):
spec = concretize("mpileaks")
with pytest.raises(AttributeError):
spec.package.v1

def test_preferred(self):
""" "Test packages with some version marked as preferred=True"""

@@ -103,23 +103,6 @@ def test_repo(_create_test_repo, monkeypatch, mock_stage):
yield mock_repo_path


class MakeStage:
def __init__(self, stage):
self.stage = stage

def __call__(self, *args, **kwargs):
return self.stage


@pytest.fixture
def fake_installs(monkeypatch, tmpdir):
stage_path = str(tmpdir.ensure("fake-stage", dir=True))
universal_unused_stage = spack.stage.DIYStage(stage_path)
monkeypatch.setattr(
spack.build_systems.generic.Package, "_make_stage", MakeStage(universal_unused_stage)
)


def test_one_package_multiple_reqs(concretize_scope, test_repo):
conf_str = """\
packages:

@@ -514,7 +497,7 @@ def test_oneof_ordering(concretize_scope, test_repo):
assert s2.satisfies("@2.5")


def test_reuse_oneof(concretize_scope, _create_test_repo, mutable_database, fake_installs):
def test_reuse_oneof(concretize_scope, _create_test_repo, mutable_database, mock_fetch):
conf_str = """\
packages:
y:

@@ -774,7 +774,7 @@ def test_keys_are_ordered(configuration_dir):
"./",
)

config_scope = spack.config.ConfigScope("modules", configuration_dir.join("site"))
config_scope = spack.config.DirectoryConfigScope("modules", configuration_dir.join("site"))

data = config_scope.get_section("modules")

@@ -956,7 +956,7 @@ def test_immutable_scope(tmpdir):
root: dummy_tree_value
"""
)
scope = spack.config.ImmutableConfigScope("test", str(tmpdir))
scope = spack.config.DirectoryConfigScope("test", str(tmpdir), writable=False)

data = scope.get_section("config")
assert data["config"]["install_tree"] == {"root": "dummy_tree_value"}
@@ -966,7 +966,9 @@ def test_immutable_scope(tmpdir):


def test_single_file_scope(config, env_yaml):
scope = spack.config.SingleFileScope("env", env_yaml, spack.schema.env.schema, ["spack"])
scope = spack.config.SingleFileScope(
"env", env_yaml, spack.schema.env.schema, yaml_path=["spack"]
)

with spack.config.override(scope):
# from the single-file config

@@ -1002,7 +1004,9 @@ def test_single_file_scope_section_override(tmpdir, config):
"""
)

scope = spack.config.SingleFileScope("env", env_yaml, spack.schema.env.schema, ["spack"])
scope = spack.config.SingleFileScope(
"env", env_yaml, spack.schema.env.schema, yaml_path=["spack"]
)

with spack.config.override(scope):
# from the single-file config

@@ -1018,7 +1022,7 @@ def test_single_file_scope_section_override(tmpdir, config):
def test_write_empty_single_file_scope(tmpdir):
env_schema = spack.schema.env.schema
scope = spack.config.SingleFileScope(
"test", str(tmpdir.ensure("config.yaml")), env_schema, ["spack"]
"test", str(tmpdir.ensure("config.yaml")), env_schema, yaml_path=["spack"]
)
scope._write_section("config")
# confirm we can write empty config
@@ -1217,7 +1221,9 @@ def test_license_dir_config(mutable_config, mock_packages):

@pytest.mark.regression("22547")
def test_single_file_scope_cache_clearing(env_yaml):
scope = spack.config.SingleFileScope("env", env_yaml, spack.schema.env.schema, ["spack"])
scope = spack.config.SingleFileScope(
"env", env_yaml, spack.schema.env.schema, yaml_path=["spack"]
)
# Check that we can retrieve data from the single file scope
before = scope.get_section("config")
assert before

@@ -561,7 +561,7 @@ def _use_test_platform(test_platform):
#
@pytest.fixture(scope="session")
def mock_repo_path():
yield spack.repo.Repo(spack.paths.mock_packages_path)
yield spack.repo.from_path(spack.paths.mock_packages_path)


def _pkg_install_fn(pkg, spec, prefix):

@@ -588,7 +588,7 @@ def mock_packages(mock_repo_path, mock_pkg_install, request):
def mutable_mock_repo(mock_repo_path, request):
"""Function-scoped mock packages, for tests that need to modify them."""
ensure_configuration_fixture_run_before(request)
mock_repo = spack.repo.Repo(spack.paths.mock_packages_path)
mock_repo = spack.repo.from_path(spack.paths.mock_packages_path)
with spack.repo.use_repositories(mock_repo) as mock_repo_path:
yield mock_repo_path

@@ -719,9 +719,9 @@ def _create_mock_configuration_scopes(configuration_dir):
"""Create the configuration scopes used in `config` and `mutable_config`."""
return [
spack.config.InternalConfigScope("_builtin", spack.config.CONFIG_DEFAULTS),
spack.config.ConfigScope("site", str(configuration_dir.join("site"))),
spack.config.ConfigScope("system", str(configuration_dir.join("system"))),
spack.config.ConfigScope("user", str(configuration_dir.join("user"))),
spack.config.DirectoryConfigScope("site", str(configuration_dir.join("site"))),
spack.config.DirectoryConfigScope("system", str(configuration_dir.join("system"))),
spack.config.DirectoryConfigScope("user", str(configuration_dir.join("user"))),
spack.config.InternalConfigScope("command_line"),
]

@@ -755,7 +755,7 @@ def mutable_empty_config(tmpdir_factory, configuration_dir):
"""Empty configuration that can be modified by the tests."""
mutable_dir = tmpdir_factory.mktemp("mutable_config").join("tmp")
scopes = [
spack.config.ConfigScope(name, str(mutable_dir.join(name)))
spack.config.DirectoryConfigScope(name, str(mutable_dir.join(name)))
for name in ["site", "system", "user"]
]

@@ -790,7 +790,7 @@ def concretize_scope(mutable_config, tmpdir):
"""Adds a scope for concretization preferences"""
tmpdir.ensure_dir("concretize")
mutable_config.push_scope(
spack.config.ConfigScope("concretize", str(tmpdir.join("concretize")))
spack.config.DirectoryConfigScope("concretize", str(tmpdir.join("concretize")))
)

yield str(tmpdir.join("concretize"))

@@ -802,10 +802,10 @@ def concretize_scope(mutable_config, tmpdir):
@pytest.fixture
def no_compilers_yaml(mutable_config):
"""Creates a temporary configuration without compilers.yaml"""
for scope, local_config in mutable_config.scopes.items():
if not local_config.path: # skip internal scopes
for local_config in mutable_config.scopes.values():
if not isinstance(local_config, spack.config.DirectoryConfigScope):
continue
compilers_yaml = os.path.join(local_config.path, "compilers.yaml")
compilers_yaml = local_config.get_section_filename("compilers")
if os.path.exists(compilers_yaml):
os.remove(compilers_yaml)
return mutable_config

@@ -814,7 +814,9 @@ def no_compilers_yaml(mutable_config):
@pytest.fixture()
def mock_low_high_config(tmpdir):
"""Mocks two configuration scopes: 'low' and 'high'."""
scopes = [spack.config.ConfigScope(name, str(tmpdir.join(name))) for name in ["low", "high"]]
scopes = [
spack.config.DirectoryConfigScope(name, str(tmpdir.join(name))) for name in ["low", "high"]
]

with spack.config.use_configuration(*scopes) as config:
yield config

@@ -2019,7 +2021,8 @@ def create_test_repo(tmpdir, pkg_name_content_tuples):
with open(str(pkg_file), "w") as f:
f.write(pkg_str)

return spack.repo.Repo(repo_path)
repo_cache = spack.util.file_cache.FileCache(str(tmpdir.join("cache")))
return spack.repo.Repo(repo_path, cache=repo_cache)


@pytest.fixture()

@@ -2061,3 +2064,9 @@ def _c_compiler_always_exists():
spack.solver.asp.c_compiler_runs = _true
yield
spack.solver.asp.c_compiler_runs = fn


@pytest.fixture(scope="session")
def mock_test_cache(tmp_path_factory):
cache_dir = tmp_path_factory.mktemp("cache")
return spack.util.file_cache.FileCache(str(cache_dir))

@@ -146,7 +146,7 @@ def test_read_and_write_spec(temporary_store, config, mock_packages):
assert not os.path.exists(install_dir)


def test_handle_unknown_package(temporary_store, config, mock_packages):
def test_handle_unknown_package(temporary_store, config, mock_packages, tmp_path):
"""This test ensures that spack can at least do *some*
operations with packages that are installed but that it
does not know about. This is actually not such an uncommon
@@ -158,7 +158,9 @@ def test_handle_unknown_package(temporary_store, config, mock_packages):
or query them again if the package goes away.
"""
layout = temporary_store.layout
mock_db = spack.repo.RepoPath(spack.paths.mock_packages_path)

repo_cache = spack.util.file_cache.FileCache(str(tmp_path / "cache"))
mock_db = spack.repo.RepoPath(spack.paths.mock_packages_path, cache=repo_cache)

not_in_mock = set.difference(
set(spack.repo.all_package_names()), set(mock_db.all_package_names())

@@ -93,6 +93,21 @@ def test_log_python_output_and_echo_output(capfd, tmpdir):
assert capfd.readouterr()[0] == "force echo\n"


def test_log_output_with_control_codes(capfd, tmpdir):
with tmpdir.as_cwd():
with log.log_output("foo.txt"):
# Print a sample of formatted GCC error output
# Line obtained from the file generated by running gcc on a nonexistent file:
# gcc -fdiagnostics-color=always ./test.cpp 2>test.log
csi = "\x1b["
print(
f"{csi}01m{csi}Kgcc:{csi}m{csi}K {csi}01;31m{csi}Kerror: {csi}m{csi}K./test.cpp:"
)

with open("foo.txt") as f:
assert f.read() == "gcc: error: ./test.cpp:\n"


def _log_filter_fn(string):
return string.replace("foo", "bar")

@@ -32,12 +32,12 @@ def test_package_name(self):
assert pkg_cls.name == "mpich"

def test_package_filename(self):
repo = spack.repo.Repo(mock_packages_path)
repo = spack.repo.from_path(mock_packages_path)
filename = repo.filename_for_package_name("mpich")
assert filename == os.path.join(mock_packages_path, "packages", "mpich", "package.py")

def test_nonexisting_package_filename(self):
repo = spack.repo.Repo(mock_packages_path)
repo = spack.repo.from_path(mock_packages_path)
filename = repo.filename_for_package_name("some-nonexisting-package")
assert filename == os.path.join(
mock_packages_path, "packages", "some-nonexisting-package", "package.py"

@@ -270,12 +270,9 @@ def trigger_bad_patch(pkg):
def test_patch_failure_develop_spec_exits_gracefully(
mock_packages, config, install_mockery, mock_fetch, tmpdir, mock_stage
):
"""
ensure that a failing patch does not trigger exceptions
for develop specs
"""
"""ensure that a failing patch does not trigger exceptions for develop specs"""

spec = Spec("patch-a-dependency " "^libelf dev_path=%s" % str(tmpdir))
spec = Spec(f"patch-a-dependency ^libelf dev_path={tmpdir}")
spec.concretize()
libelf = spec["libelf"]
assert "patches" in list(libelf.variants.keys())

@@ -12,21 +12,28 @@


@pytest.fixture(params=["packages", "", "foo"])
def extra_repo(tmpdir_factory, request):
def extra_repo(tmp_path_factory, request):
repo_namespace = "extra_test_repo"
repo_dir = tmpdir_factory.mktemp(repo_namespace)
repo_dir.ensure(request.param, dir=True)

with open(str(repo_dir.join("repo.yaml")), "w") as f:
f.write(
repo_dir = tmp_path_factory.mktemp(repo_namespace)
cache_dir = tmp_path_factory.mktemp("cache")
(repo_dir / request.param).mkdir(parents=True, exist_ok=True)
if request.param == "packages":
(repo_dir / "repo.yaml").write_text(
"""
repo:
namespace: extra_test_repo
"""
)
if request.param != "packages":
f.write(f" subdirectory: '{request.param}'")
return (spack.repo.Repo(str(repo_dir)), request.param)
else:
(repo_dir / "repo.yaml").write_text(
f"""
repo:
namespace: extra_test_repo
subdirectory: '{request.param}'
"""
)
repo_cache = spack.util.file_cache.FileCache(str(cache_dir))
return spack.repo.Repo(str(repo_dir), cache=repo_cache), request.param


def test_repo_getpkg(mutable_mock_repo):

@@ -177,8 +184,11 @@ def test_repo_dump_virtuals(tmpdir, mutable_mock_repo, mock_packages, ensure_deb
([spack.paths.mock_packages_path, spack.paths.packages_path], ["builtin.mock", "builtin"]),
],
)
def test_repository_construction_doesnt_use_globals(nullify_globals, repo_paths, namespaces):
repo_path = spack.repo.RepoPath(*repo_paths)
def test_repository_construction_doesnt_use_globals(
nullify_globals, tmp_path, repo_paths, namespaces
):
repo_cache = spack.util.file_cache.FileCache(str(tmp_path / "cache"))
repo_path = spack.repo.RepoPath(*repo_paths, cache=repo_cache)
assert len(repo_path.repos) == len(namespaces)
assert [x.namespace for x in repo_path.repos] == namespaces

@@ -188,8 +198,84 @@ def test_path_computation_with_names(method_name, mock_repo_path):
"""Tests that repositories can compute the correct paths when using both fully qualified
names and unqualified names.
"""
repo_path = spack.repo.RepoPath(mock_repo_path)
repo_path = spack.repo.RepoPath(mock_repo_path, cache=None)
method = getattr(repo_path, method_name)
unqualified = method("mpileaks")
qualified = method("builtin.mock.mpileaks")
assert qualified == unqualified


@pytest.mark.usefixtures("nullify_globals")
class TestRepo:
"""Test that the Repo class work correctly, and does not depend on globals,
except the REPOS_FINDER.
"""

def test_creation(self, mock_test_cache):
repo = spack.repo.Repo(spack.paths.mock_packages_path, cache=mock_test_cache)
assert repo.config_file.endswith("repo.yaml")
assert repo.namespace == "builtin.mock"

@pytest.mark.parametrize(
"name,expected", [("mpi", True), ("mpich", False), ("mpileaks", False)]
)
def test_is_virtual(self, name, expected, mock_test_cache):
repo = spack.repo.Repo(spack.paths.mock_packages_path, cache=mock_test_cache)
assert repo.is_virtual(name) is expected
assert repo.is_virtual_safe(name) is expected

@pytest.mark.parametrize(
"module_name,expected",
[
("dla_future", "dla-future"),
("num7zip", "7zip"),
# If no package is there, None is returned
("unknown", None),
],
)
def test_real_name(self, module_name, expected, mock_test_cache):
"""Test that we can correctly compute the 'real' name of a package, from the one
used to import the Python module.
"""
repo = spack.repo.Repo(spack.paths.mock_packages_path, cache=mock_test_cache)
assert repo.real_name(module_name) == expected

@pytest.mark.parametrize("name", ["mpileaks", "7zip", "dla-future"])
def test_get(self, name, mock_test_cache):
repo = spack.repo.Repo(spack.paths.mock_packages_path, cache=mock_test_cache)
mock_spec = spack.spec.Spec(name)
mock_spec._mark_concrete()
pkg = repo.get(mock_spec)
assert pkg.__class__ == repo.get_pkg_class(name)

@pytest.mark.parametrize("virtual_name,expected", [("mpi", ["mpich", "zmpi"])])
def test_providers(self, virtual_name, expected, mock_test_cache):
repo = spack.repo.Repo(spack.paths.mock_packages_path, cache=mock_test_cache)
provider_names = {x.name for x in repo.providers_for(virtual_name)}
assert provider_names.issuperset(expected)

@pytest.mark.parametrize(
"extended,expected",
[("python", ["py-extension1", "python-venv"]), ("perl", ["perl-extension"])],
)
def test_extensions(self, extended, expected, mock_test_cache):
repo = spack.repo.Repo(spack.paths.mock_packages_path, cache=mock_test_cache)
provider_names = {x.name for x in repo.extensions_for(extended)}
assert provider_names.issuperset(expected)

def test_all_package_names(self, mock_test_cache):
repo = spack.repo.Repo(spack.paths.mock_packages_path, cache=mock_test_cache)
all_names = repo.all_package_names(include_virtuals=True)
real_names = repo.all_package_names(include_virtuals=False)
assert set(all_names).issuperset(real_names)
for name in set(all_names) - set(real_names):
assert repo.is_virtual(name)
assert repo.is_virtual_safe(name)

def test_packages_with_tags(self, mock_test_cache):
repo = spack.repo.Repo(spack.paths.mock_packages_path, cache=mock_test_cache)
r1 = repo.packages_with_tags("tag1")
r2 = repo.packages_with_tags("tag1", "tag2")
assert "mpich" in r1 and "mpich" in r2
assert "mpich2" in r1 and "mpich2" not in r2
assert set(r2).issubset(r1)

@@ -196,21 +196,29 @@ def test_spec_list_matrix_exclude(self, mock_packages):
speclist = SpecList("specs", matrix)
assert len(speclist.specs) == 1

@pytest.mark.regression("22991")
def test_spec_list_constraints_with_structure(
self, mock_packages, mock_fetch, install_mockery
):
# Setup by getting hash and installing package with dep
libdwarf_spec = Spec("libdwarf").concretized()
libdwarf_spec.package.do_install()
def test_spec_list_exclude_with_abstract_hashes(self, mock_packages, install_mockery):
# Put mpich in the database so it can be referred to by hash.
mpich_1 = Spec("mpich+debug").concretized()
mpich_2 = Spec("mpich~debug").concretized()
mpich_1.package.do_install(fake=True)
mpich_2.package.do_install(fake=True)

# Create matrix
matrix = {
"matrix": [["mpileaks"], ["^callpath"], ["^libdwarf/%s" % libdwarf_spec.dag_hash()]]
}
# Create matrix and exclude +debug, which excludes the first mpich after its abstract hash
# is resolved.
speclist = SpecList(
"specs",
[
{
"matrix": [
["mpileaks"],
["^callpath"],
[f"^mpich/{mpich_1.dag_hash(5)}", f"^mpich/{mpich_2.dag_hash(5)}"],
],
"exclude": ["^mpich+debug"],
}
],
)

# ensure the concrete spec was retained in the matrix entry of which
# it is a dependency
speclist = SpecList("specs", [matrix])
# Ensure that only mpich~debug is selected, and that the assembled spec remains abstract.
assert len(speclist.specs) == 1
assert libdwarf_spec in speclist.specs[0]
assert speclist.specs[0] == Spec(f"mpileaks ^callpath ^mpich/{mpich_2.dag_hash(5)}")

@@ -13,10 +13,12 @@
import collections.abc
import gzip
import inspect
import io
import json
import os

import pytest
import ruamel.yaml

import spack.hash_types as ht
import spack.paths

@@ -505,3 +507,50 @@ def test_load_json_specfiles(specfile, expected_hash, reader_cls):
# JSON or YAML file, not a list
for edge in s2.traverse_edges():
assert isinstance(edge.virtuals, tuple), edge


def test_anchorify_1():
"""Test that anchorify replaces duplicate values with references to a single instance, and
that that results in anchors in the output YAML."""
before = {"a": [1, 2, 3], "b": [1, 2, 3]}
after = {"a": [1, 2, 3], "b": [1, 2, 3]}
syaml.anchorify(after)
assert before == after
assert after["a"] is after["b"]

# Check if anchors are used
out = io.StringIO()
ruamel.yaml.YAML().dump(after, out)
assert (
out.getvalue()
== """\
a: &id001
- 1
- 2
- 3
b: *id001
"""
)


def test_anchorify_2():
before = {"a": {"b": {"c": True}}, "d": {"b": {"c": True}}, "e": {"c": True}}
after = {"a": {"b": {"c": True}}, "d": {"b": {"c": True}}, "e": {"c": True}}
syaml.anchorify(after)
assert before == after
assert after["a"] is after["d"]
assert after["a"]["b"] is after["e"]

# Check if anchors are used
out = io.StringIO()
ruamel.yaml.YAML().dump(after, out)
assert (
out.getvalue()
== """\
a: &id001
b: &id002
c: true
d: *id001
e: *id002
"""
)

@@ -23,7 +23,7 @@
|
||||
import spack.util.executable
|
||||
import spack.util.url as url_util
|
||||
from spack.resource import Resource
|
||||
from spack.stage import DevelopStage, DIYStage, ResourceStage, Stage, StageComposite
|
||||
from spack.stage import DevelopStage, ResourceStage, Stage, StageComposite
|
||||
from spack.util.path import canonicalize_path
|
||||
|
||||
# The following values are used for common fetch and stage mocking fixtures:
|
||||
@@ -146,9 +146,8 @@ def check_destroy(stage, stage_name):
|
||||
assert not os.path.exists(stage_path)
|
||||
|
||||
# tmp stage needs to remove tmp dir too.
|
||||
if not isinstance(stage, DIYStage):
|
||||
target = os.path.realpath(stage_path)
|
||||
assert not os.path.exists(target)
|
||||
target = os.path.realpath(stage_path)
|
||||
assert not os.path.exists(target)
|
||||
|
||||
|
||||
def check_setup(stage, stage_name, archive):
|
||||
@@ -801,62 +800,6 @@ def test_stage_constructor_with_path(self, tmpdir):
|
||||
with Stage("file:///does-not-exist", path=testpath) as stage:
|
||||
assert stage.path == testpath
|
||||
|
||||
def test_diystage_path_none(self):
|
||||
"""Ensure DIYStage for path=None behaves as expected."""
|
||||
with pytest.raises(ValueError):
|
||||
DIYStage(None)
|
||||
|
||||
def test_diystage_path_invalid(self):
|
||||
"""Ensure DIYStage for an invalid path behaves as expected."""
|
||||
with pytest.raises(spack.stage.StagePathError):
|
||||
DIYStage("/path/does/not/exist")
|
||||
|
||||
def test_diystage_path_valid(self, tmpdir):
|
||||
"""Ensure DIYStage for a valid path behaves as expected."""
|
||||
path = str(tmpdir)
|
||||
stage = DIYStage(path)
|
||||
assert stage.path == path
|
||||
assert stage.source_path == path
|
||||
|
||||
# Order doesn't really matter for DIYStage since they are
|
||||
# basically NOOPs; however, call each since they are part
|
||||
# of the normal stage usage and to ensure full test coverage.
|
||||
stage.create() # Only sets the flag value
|
||||
assert stage.created
|
||||
|
||||
stage.cache_local() # Only outputs a message
|
||||
stage.fetch() # Only outputs a message
|
||||
stage.check() # Only outputs a message
|
||||
stage.expand_archive() # Only outputs a message
|
||||
|
||||
assert stage.expanded # The path/source_path does exist
|
||||
|
||||
with pytest.raises(spack.stage.RestageError):
|
||||
stage.restage()
|
||||
|
||||
stage.destroy() # A no-op
|
||||
assert stage.path == path # Ensure can still access attributes
|
||||
assert os.path.exists(stage.source_path) # Ensure path still exists
|
||||
|
||||
def test_diystage_preserve_file(self, tmpdir):
|
||||
"""Ensure DIYStage preserves an existing file."""
|
||||
# Write a file to the temporary directory
|
||||
fn = tmpdir.join(_readme_fn)
|
||||
fn.write(_readme_contents)
|
||||
|
||||
# Instantiate the DIYStage and ensure the above file is unchanged.
|
||||
path = str(tmpdir)
|
||||
stage = DIYStage(path)
|
||||
assert os.path.isdir(path)
|
||||
assert os.path.isfile(str(fn))
|
||||
|
||||
stage.create() # Only sets the flag value
|
||||
|
||||
readmefn = str(fn)
|
||||
assert os.path.isfile(readmefn)
|
||||
with open(readmefn) as _file:
|
||||
assert _file.read() == _readme_contents
|
||||
|
||||
|
||||
def _create_files_from_tree(base, tree):
|
||||
for name, content in tree.items():
|
||||
|
||||
@@ -173,8 +173,6 @@ def test_fetch(
|
||||
assert "echo Building..." in contents
|
||||
|
||||
|
||||
# TODO-27021
|
||||
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
|
||||
@pytest.mark.parametrize(
|
||||
"spec,url,digest",
|
||||
[
|
||||
@@ -205,7 +203,6 @@ def test_from_list_url(mock_packages, config, spec, url, digest, _fetch_method):
|
||||
assert fetch_strategy.extra_options == {"timeout": 60}
|
||||
|
||||
|
||||
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
|
||||
@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
|
||||
@pytest.mark.parametrize(
|
||||
"requested_version,tarball,digest",
|
||||
|
||||
@@ -3,9 +3,8 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
from abc import ABC
|
||||
from collections import defaultdict
|
||||
from typing import Any, Callable, Iterable, List, Optional, Tuple, Union
|
||||
from collections import defaultdict, namedtuple
|
||||
from typing import Union
|
||||
|
||||
import spack.deptypes as dt
|
||||
import spack.spec
|
||||
@@ -13,82 +12,72 @@
|
||||
# Export only the high-level API.
|
||||
__all__ = ["traverse_edges", "traverse_nodes", "traverse_tree"]
|
||||
|
||||
EdgeAndDepth = Tuple["spack.spec.DependencySpec", int]
|
||||
Key = Callable[["spack.spec.Spec"], Any]
|
||||
#: Data class that stores a directed edge together with the depth at
|
||||
#: which the target vertex was found. It is passed to ``accept``
|
||||
#: and ``neighbors`` of visitors, so they can decide whether to
|
||||
#: follow the edge or not.
|
||||
EdgeAndDepth = namedtuple("EdgeAndDepth", ["edge", "depth"])
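As an aside, a minimal sketch (not Spack code) of what the namedtuple buys over the bare (edge, depth) tuple it replaces: fields can be addressed by name, which is what the item[0]/item[1] to item.edge/item.depth changes throughout this module rely on. The "fake-edge" value below is a placeholder.

from collections import namedtuple

EdgeAndDepth = namedtuple("EdgeAndDepth", ["edge", "depth"])

item = EdgeAndDepth(edge="fake-edge", depth=2)  # "fake-edge" stands in for a DependencySpec
assert item.edge == item[0] and item.depth == item[1]  # both access styles still work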
|
||||
|
||||
|
||||
def sort_edges(edges: List["spack.spec.DependencySpec"]) -> List["spack.spec.DependencySpec"]:
|
||||
def sort_edges(edges):
|
||||
edges.sort(key=lambda edge: (edge.spec.name or "", edge.spec.abstract_hash or ""))
|
||||
return edges
|
||||
|
||||
|
||||
class AbstractVisitor(ABC):
|
||||
"""Abstract base class for visitors that traverse the DAG."""
|
||||
class BaseVisitor:
|
||||
"""A simple visitor that accepts all edges unconditionally and follows all
|
||||
edges to dependencies of a given ``deptype``."""
|
||||
|
||||
def accept(self, item: EdgeAndDepth) -> bool:
|
||||
def __init__(self, depflag: dt.DepFlag = dt.ALL):
|
||||
self.depflag = depflag
|
||||
|
||||
def accept(self, item):
|
||||
"""
|
||||
Arguments:
|
||||
item: the edge through which this node was reached, and the depth at which it was found.
|
||||
item (EdgeAndDepth): Provides the depth and the edge through which the
|
||||
node was discovered
|
||||
|
||||
Returns:
|
||||
Iff True, the node is yielded by iterators and dependencies are followed.
|
||||
bool: Returns ``True`` if the node is accepted. When ``False``, this
|
||||
indicates that the node won't be yielded by iterators and dependencies
|
||||
are not followed.
|
||||
"""
|
||||
return True
|
||||
|
||||
def neighbors(self, item: EdgeAndDepth) -> List["spack.spec.DependencySpec"]:
|
||||
raise NotImplementedError
|
||||
def neighbors(self, item):
|
||||
return sort_edges(item.edge.spec.edges_to_dependencies(depflag=self.depflag))
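A hedged sketch of the visitor contract this class anchors: any object with accept(item) and neighbors(item) can steer a traversal. The wrapper below is hypothetical, not part of Spack; it cuts the walk off below a maximum depth.

class MaxDepthVisitor:
    """Hypothetical wrapper: delegate to an inner visitor, but neither yield
    nor expand nodes past ``max_depth``."""

    def __init__(self, visitor, max_depth):
        self.visitor = visitor
        self.max_depth = max_depth

    def accept(self, item):
        # item is an EdgeAndDepth namedtuple
        return item.depth <= self.max_depth and self.visitor.accept(item)

    def neighbors(self, item):
        # do not expand edges that would exceed the cutoff
        return self.visitor.neighbors(item) if item.depth < self.max_depth else []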
|
||||
|
||||
|
||||
class AbstractDFSVisitor(AbstractVisitor):
|
||||
"""Abstract base class for visitors that traverse the DAG in depth-first fashion."""
|
||||
|
||||
def pre(self, item: EdgeAndDepth) -> None:
|
||||
pass
|
||||
|
||||
def post(self, item: EdgeAndDepth) -> None:
|
||||
pass
|
||||
|
||||
|
||||
class DefaultVisitor(AbstractVisitor):
|
||||
def __init__(self, depflag: dt.DepFlag = dt.ALL) -> None:
|
||||
self.depflag = depflag
|
||||
|
||||
def neighbors(self, item: EdgeAndDepth) -> List["spack.spec.DependencySpec"]:
|
||||
return sort_edges(item[0].spec.edges_to_dependencies(depflag=self.depflag))
|
||||
|
||||
|
||||
class ReverseVisitor(AbstractVisitor):
|
||||
class ReverseVisitor:
|
||||
"""A visitor that reverses the arrows in the DAG, following dependents."""
|
||||
|
||||
def __init__(self, visitor: AbstractVisitor, depflag: dt.DepFlag = dt.ALL) -> None:
|
||||
def __init__(self, visitor, depflag: dt.DepFlag = dt.ALL):
|
||||
self.visitor = visitor
|
||||
self.depflag = depflag
|
||||
|
||||
def accept(self, item: EdgeAndDepth) -> bool:
|
||||
def accept(self, item):
|
||||
return self.visitor.accept(item)
|
||||
|
||||
def neighbors(self, item: EdgeAndDepth) -> List["spack.spec.DependencySpec"]:
|
||||
def neighbors(self, item):
|
||||
"""Return dependents, note that we actually flip the edge direction to allow
|
||||
generic programming"""
|
||||
spec = item[0].spec
|
||||
spec = item.edge.spec
|
||||
return sort_edges(
|
||||
[edge.flip() for edge in spec.edges_from_dependents(depflag=self.depflag)]
|
||||
)
|
||||
|
||||
|
||||
class CoverNodesVisitor(AbstractVisitor):
|
||||
class CoverNodesVisitor:
|
||||
"""A visitor that traverses each node once."""
|
||||
|
||||
def __init__(
|
||||
self, visitor: AbstractVisitor, key: Key = id, visited: Optional[set] = None
|
||||
) -> None:
|
||||
def __init__(self, visitor, key=id, visited=None):
|
||||
self.visitor = visitor
|
||||
self.key = key
|
||||
self.visited = set() if visited is None else visited
|
||||
|
||||
def accept(self, item: EdgeAndDepth) -> bool:
|
||||
def accept(self, item):
|
||||
# Covering nodes means: visit nodes once and only once.
|
||||
key = self.key(item[0].spec)
|
||||
key = self.key(item.edge.spec)
|
||||
|
||||
if key in self.visited:
|
||||
return False
|
||||
@@ -97,26 +86,24 @@ def accept(self, item: EdgeAndDepth) -> bool:
|
||||
self.visited.add(key)
|
||||
return accept
|
||||
|
||||
def neighbors(self, item: EdgeAndDepth) -> List["spack.spec.DependencySpec"]:
|
||||
def neighbors(self, item):
|
||||
return self.visitor.neighbors(item)
|
||||
|
||||
|
||||
class CoverEdgesVisitor(AbstractVisitor):
|
||||
class CoverEdgesVisitor:
|
||||
"""A visitor that traverses all edges once."""
|
||||
|
||||
def __init__(
|
||||
self, visitor: AbstractVisitor, key: Key = id, visited: Optional[set] = None
|
||||
) -> None:
|
||||
def __init__(self, visitor, key=id, visited=None):
|
||||
self.visitor = visitor
|
||||
self.visited = set() if visited is None else visited
|
||||
self.key = key
|
||||
|
||||
def accept(self, item: EdgeAndDepth) -> bool:
|
||||
def accept(self, item):
|
||||
return self.visitor.accept(item)
|
||||
|
||||
def neighbors(self, item: EdgeAndDepth) -> List["spack.spec.DependencySpec"]:
|
||||
def neighbors(self, item):
|
||||
# Covering edges means: drop dependencies of visited nodes.
|
||||
key = self.key(item[0].spec)
|
||||
key = self.key(item.edge.spec)
|
||||
|
||||
if key in self.visited:
|
||||
return []
|
||||
@@ -125,7 +112,7 @@ def neighbors(self, item: EdgeAndDepth) -> List["spack.spec.DependencySpec"]:
|
||||
return self.visitor.neighbors(item)
|
||||
|
||||
|
||||
class TopoVisitor(AbstractDFSVisitor):
|
||||
class TopoVisitor:
|
||||
"""Visitor that can be used in :py:func:`depth-first traversal
|
||||
<spack.traverse.traverse_depth_first_with_visitor>` to generate
|
||||
a topologically ordered list of specs.
|
||||
@@ -145,39 +132,42 @@ class TopoVisitor(AbstractDFSVisitor):
|
||||
edges, with the property that for each vertex all in-edges precede all out-edges.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, visitor: AbstractVisitor, key: Key = id, root: bool = True, all_edges: bool = False
|
||||
):
|
||||
def __init__(self, visitor, key=id, root=True, all_edges=False):
|
||||
"""
|
||||
Arguments:
|
||||
visitor: visitor that implements accept(), pre(), post() and neighbors()
|
||||
key: uniqueness key for nodes
|
||||
root: Whether to include the root node.
|
||||
all_edges: when ``False`` (default): Each node is reached once, and
|
||||
``map(lambda edge: edge.spec, visitor.edges)`` is topologically ordered. When
|
||||
``True``, every edge is listed, ordered such that for each node all in-edges
|
||||
precede all out-edges.
|
||||
root (bool): Whether to include the root node.
|
||||
all_edges (bool): when ``False`` (default): Each node is reached once,
|
||||
and ``map(lambda edge: edge.spec, visitor.edges)`` is topologically
|
||||
ordered. When ``True``, every edge is listed, ordered such that for
|
||||
each node all in-edges precede all out-edges.
|
||||
"""
|
||||
self.visited: set = set()
|
||||
self.visited = set()
|
||||
self.visitor = visitor
|
||||
self.key = key
|
||||
self.root = root
|
||||
self.reverse_order: List[spack.spec.DependencySpec] = []
|
||||
self.reverse_order = []
|
||||
self.all_edges = all_edges
|
||||
|
||||
def accept(self, item: EdgeAndDepth) -> bool:
|
||||
if self.key(item[0].spec) not in self.visited:
|
||||
def accept(self, item):
|
||||
if self.key(item.edge.spec) not in self.visited:
|
||||
return True
|
||||
if self.all_edges and (self.root or item[1] > 0):
|
||||
self.reverse_order.append(item[0])
|
||||
if self.all_edges and (self.root or item.depth > 0):
|
||||
self.reverse_order.append(item.edge)
|
||||
return False
|
||||
|
||||
def post(self, item: EdgeAndDepth) -> None:
|
||||
self.visited.add(self.key(item[0].spec))
|
||||
if self.root or item[1] > 0:
|
||||
self.reverse_order.append(item[0])
|
||||
def pre(self, item):
|
||||
# You could add a temporary marker for cycle detection
|
||||
# that's cleared in `post`, but we assume no cycles.
|
||||
pass
|
||||
|
||||
def neighbors(self, item: EdgeAndDepth) -> List["spack.spec.DependencySpec"]:
|
||||
def post(self, item):
|
||||
self.visited.add(self.key(item.edge.spec))
|
||||
if self.root or item.depth > 0:
|
||||
self.reverse_order.append(item.edge)
|
||||
|
||||
def neighbors(self, item):
|
||||
return self.visitor.neighbors(item)
|
||||
|
||||
@property
|
||||
@@ -214,7 +204,7 @@ def get_visitor_from_args(
|
||||
"""
|
||||
if not isinstance(depflag, dt.DepFlag):
|
||||
depflag = dt.canonicalize(depflag)
|
||||
visitor = visitor or DefaultVisitor(depflag)
|
||||
visitor = visitor or BaseVisitor(depflag)
|
||||
if cover == "nodes":
|
||||
visitor = CoverNodesVisitor(visitor, key, visited)
|
||||
elif cover == "edges":
|
||||
@@ -227,40 +217,38 @@ def get_visitor_from_args(
|
||||
def with_artificial_edges(specs):
|
||||
"""Initialize a list of edges from an imaginary root node to the root specs."""
|
||||
return [
|
||||
(spack.spec.DependencySpec(parent=None, spec=s, depflag=0, virtuals=()), 0) for s in specs
|
||||
EdgeAndDepth(
|
||||
edge=spack.spec.DependencySpec(parent=None, spec=s, depflag=0, virtuals=()), depth=0
|
||||
)
|
||||
for s in specs
|
||||
]
|
||||
|
||||
|
||||
def traverse_depth_first_edges_generator(
|
||||
edges: List[EdgeAndDepth],
|
||||
visitor,
|
||||
post_order: bool = False,
|
||||
root: bool = True,
|
||||
depth: bool = False,
|
||||
):
|
||||
def traverse_depth_first_edges_generator(edges, visitor, post_order=False, root=True, depth=False):
|
||||
"""Generator that takes explores a DAG in depth-first fashion starting from
|
||||
a list of edges. Note that typically DFS would take a vertex, not a list of edges,
|
||||
but the API is like this so we don't have to create an artificial root node when
|
||||
traversing from multiple roots in a DAG.
|
||||
|
||||
Arguments:
|
||||
edges: List of EdgeAndDepth instances
|
||||
edges (list): List of EdgeAndDepth instances
|
||||
visitor: class instance implementing accept() and neighbors()
|
||||
post_order: Whether to yield nodes when backtracking
|
||||
root: whether to yield at depth 0
|
||||
depth: when ``True`` yield a tuple of depth and edge, otherwise only the edge.
|
||||
post_order (bool): Whether to yield nodes when backtracking
|
||||
root (bool): whether to yield at depth 0
|
||||
depth (bool): when ``True`` yield a tuple of depth and edge, otherwise only the
|
||||
edge.
|
||||
"""
|
||||
for edge in edges:
|
||||
if not visitor.accept(edge):
|
||||
continue
|
||||
|
||||
yield_me = root or edge[1] > 0
|
||||
yield_me = root or edge.depth > 0
|
||||
|
||||
# Pre
|
||||
if yield_me and not post_order:
|
||||
yield (edge[1], edge[0]) if depth else edge[0]
|
||||
yield (edge.depth, edge.edge) if depth else edge.edge
|
||||
|
||||
neighbors = [(n, edge[1] + 1) for n in visitor.neighbors(edge)]
|
||||
neighbors = [EdgeAndDepth(edge=n, depth=edge.depth + 1) for n in visitor.neighbors(edge)]
|
||||
|
||||
# This extra branch is just for efficiency.
|
||||
if len(neighbors) > 0:
|
||||
@@ -271,12 +259,10 @@ def traverse_depth_first_edges_generator(
|
||||
|
||||
# Post
|
||||
if yield_me and post_order:
|
||||
yield (edge[1], edge[0]) if depth else edge[0]
|
||||
yield (edge.depth, edge.edge) if depth else edge.edge
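Putting the low-level pieces together, a hedged usage sketch, assuming `spec` is a concretized spack.spec.Spec: wrap the roots in artificial edges, then drive the generator with a node-covering visitor.

visitor = CoverNodesVisitor(BaseVisitor(), key=id)
for edge in traverse_depth_first_edges_generator(with_artificial_edges([spec]), visitor):
    print(edge.spec.name)  # pre-order; each package visited exactly once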
|
||||
|
||||
|
||||
def traverse_breadth_first_edges_generator(
|
||||
queue: List[EdgeAndDepth], visitor, root: bool = True, depth: bool = False
|
||||
):
|
||||
def traverse_breadth_first_edges_generator(queue, visitor, root=True, depth=False):
|
||||
while len(queue) > 0:
|
||||
edge = queue.pop(0)
|
||||
|
||||
@@ -284,18 +270,18 @@ def traverse_breadth_first_edges_generator(
|
||||
if not visitor.accept(edge):
|
||||
continue
|
||||
|
||||
if root or edge[1] > 0:
|
||||
yield (edge[1], edge[0]) if depth else edge[0]
|
||||
if root or edge.depth > 0:
|
||||
yield (edge.depth, edge.edge) if depth else edge.edge
|
||||
|
||||
for e in visitor.neighbors(edge):
|
||||
queue.append((e, edge[1] + 1))
|
||||
queue.append(EdgeAndDepth(e, edge.depth + 1))
|
||||
|
||||
|
||||
def traverse_breadth_first_with_visitor(specs: List[EdgeAndDepth], visitor: AbstractVisitor):
|
||||
def traverse_breadth_first_with_visitor(specs, visitor):
|
||||
"""Performs breadth first traversal for a list of specs (not a generator).
|
||||
|
||||
Arguments:
|
||||
specs: List of Spec instances.
|
||||
specs (list): List of Spec instances.
|
||||
visitor: object that implements accept and neighbors interface, see
|
||||
for example BaseVisitor.
|
||||
"""
|
||||
@@ -308,21 +294,26 @@ def traverse_breadth_first_with_visitor(specs: List[EdgeAndDepth], visitor: Abst
|
||||
continue
|
||||
|
||||
for e in visitor.neighbors(edge):
|
||||
queue.append((e, edge[1] + 1))
|
||||
queue.append(EdgeAndDepth(e, edge.depth + 1))
|
||||
|
||||
|
||||
def traverse_depth_first_with_visitor(edges: List[EdgeAndDepth], visitor: AbstractDFSVisitor):
|
||||
def traverse_depth_first_with_visitor(edges, visitor):
|
||||
"""Traverse a DAG in depth-first fashion using a visitor, starting from
|
||||
a list of edges. Note that typically DFS would take a vertex, not a list of edges,
|
||||
but the API is like this so we don't have to create an artificial root node when
|
||||
traversing from multiple roots in a DAG."""
|
||||
traversing from multiple roots in a DAG.
|
||||
|
||||
Arguments:
|
||||
edges (list): List of EdgeAndDepth instances
|
||||
visitor: class instance implementing accept(), pre(), post() and neighbors()
|
||||
"""
|
||||
for edge in edges:
|
||||
if not visitor.accept(edge):
|
||||
continue
|
||||
|
||||
visitor.pre(edge)
|
||||
|
||||
neighbors = [(e, edge[1] + 1) for e in visitor.neighbors(edge)]
|
||||
neighbors = [EdgeAndDepth(edge=e, depth=edge.depth + 1) for e in visitor.neighbors(edge)]
|
||||
|
||||
traverse_depth_first_with_visitor(neighbors, visitor)
|
||||
|
||||
@@ -332,15 +323,12 @@ def traverse_depth_first_with_visitor(edges: List[EdgeAndDepth], visitor: Abstra
|
||||
# Helper functions for generating a tree using breadth-first traversal
|
||||
|
||||
|
||||
def breadth_first_to_tree_edges(
|
||||
roots: Iterable["spack.spec.Spec"],
|
||||
deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
|
||||
key: Key = id,
|
||||
):
|
||||
"""This produces an adjacency list (with edges) and a map of parents. There may be nodes that
|
||||
are reached through multiple edges. To print as a tree, one should use the parents dict to
|
||||
verify if the path leading to the node is through the correct parent. If not, the branch should
|
||||
be truncated."""
|
||||
def breadth_first_to_tree_edges(roots, deptype="all", key=id):
|
||||
"""This produces an adjacency list (with edges) and a map of parents.
|
||||
There may be nodes that are reached through multiple edges. To print as
|
||||
a tree, one should use the parents dict to verify if the path leading to
|
||||
the node is through the correct parent. If not, the branch should be
|
||||
truncated."""
|
||||
edges = defaultdict(list)
|
||||
parents = dict()
|
||||
|
||||
@@ -354,11 +342,7 @@ def breadth_first_to_tree_edges(
|
||||
return edges, parents
|
||||
|
||||
|
||||
def breadth_first_to_tree_nodes(
|
||||
roots: Iterable["spack.spec.Spec"],
|
||||
deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
|
||||
key: Key = id,
|
||||
):
|
||||
def breadth_first_to_tree_nodes(roots, deptype="all", key=id):
|
||||
"""This produces a list of edges that forms a tree; every node has no more
|
||||
than one incoming edge."""
|
||||
edges = defaultdict(list)
|
||||
@@ -371,8 +355,8 @@ def breadth_first_to_tree_nodes(
|
||||
|
||||
|
||||
def traverse_breadth_first_tree_edges(parent_id, edges, parents, key=id, depth=0):
|
||||
"""Do a depth-first search on edges generated by breadth-first traversal, which can be used to
|
||||
produce a tree."""
|
||||
"""Do a depth-first search on edges generated by bread-first traversal,
|
||||
which can be used to produce a tree."""
|
||||
for edge in edges[parent_id]:
|
||||
yield (depth, edge)
|
||||
|
||||
@@ -382,23 +366,26 @@ def traverse_breadth_first_tree_edges(parent_id, edges, parents, key=id, depth=0
|
||||
if parents[child_id] != parent_id:
|
||||
continue
|
||||
|
||||
yield from traverse_breadth_first_tree_edges(child_id, edges, parents, key, depth + 1)
|
||||
# yield from ... in Python 3.
|
||||
for item in traverse_breadth_first_tree_edges(child_id, edges, parents, key, depth + 1):
|
||||
yield item
|
||||
|
||||
|
||||
def traverse_breadth_first_tree_nodes(parent_id, edges, key=id, depth=0):
|
||||
for edge in edges[parent_id]:
|
||||
yield (depth, edge)
|
||||
yield from traverse_breadth_first_tree_nodes(key(edge.spec), edges, key, depth + 1)
|
||||
for item in traverse_breadth_first_tree_nodes(key(edge.spec), edges, key, depth + 1):
|
||||
yield item
|
||||
|
||||
|
||||
# Topological order
|
||||
def traverse_edges_topo(
|
||||
specs: Iterable["spack.spec.Spec"],
|
||||
direction: str = "children",
|
||||
deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
|
||||
key: Key = id,
|
||||
root: bool = True,
|
||||
all_edges: bool = False,
|
||||
specs,
|
||||
direction="children",
|
||||
deptype: Union[dt.DepFlag, dt.DepTypes] = "all",
|
||||
key=id,
|
||||
root=True,
|
||||
all_edges=False,
|
||||
):
|
||||
"""
|
||||
Returns a list of edges in topological order, in the sense that all in-edges of a
|
||||
@@ -407,49 +394,50 @@ def traverse_edges_topo(
|
||||
directed from dependency to dependent.
|
||||
|
||||
Arguments:
|
||||
specs: List of root specs (considered to be depth 0)
|
||||
direction: ``children`` (edges are directed from dependent to dependency)
|
||||
specs (list): List of root specs (considered to be depth 0)
|
||||
direction (str): ``children`` (edges are directed from dependent to dependency)
|
||||
or ``parents`` (edges are flipped / directed from dependency to dependent)
|
||||
deptype: allowed dependency types
|
||||
key: function that takes a spec and outputs a key for uniqueness test.
|
||||
root: Yield the root nodes themselves
|
||||
all_edges: When ``False`` only one in-edge per node is returned, when ``True`` all
|
||||
reachable edges are returned.
|
||||
root (bool): Yield the root nodes themselves
|
||||
all_edges (bool): When ``False`` only one in-edge per node is returned, when
|
||||
``True`` all reachable edges are returned.
|
||||
"""
|
||||
if not isinstance(deptype, dt.DepFlag):
|
||||
deptype = dt.canonicalize(deptype)
|
||||
default = DefaultVisitor(deptype)
|
||||
with_dir = ReverseVisitor(default, deptype) if direction == "parents" else default
|
||||
topo = TopoVisitor(with_dir, key=key, root=root, all_edges=all_edges)
|
||||
traverse_depth_first_with_visitor(with_artificial_edges(specs), topo)
|
||||
return topo.edges
|
||||
visitor: Union[BaseVisitor, ReverseVisitor, TopoVisitor] = BaseVisitor(deptype)
|
||||
if direction == "parents":
|
||||
visitor = ReverseVisitor(visitor, deptype)
|
||||
visitor = TopoVisitor(visitor, key=key, root=root, all_edges=all_edges)
|
||||
traverse_depth_first_with_visitor(with_artificial_edges(specs), visitor)
|
||||
return visitor.edges
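For illustration, a hedged sketch, assuming `spec` is a concretized spack.spec.Spec: the returned edges put dependents before their dependencies, so reversing the list should give a plausible bottom-up (install-order) walk.

ordered = [edge.spec for edge in traverse_edges_topo([spec])]
install_order = list(reversed(ordered))  # dependencies before dependents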
|
||||
|
||||
|
||||
# High-level API: traverse_edges, traverse_nodes, traverse_tree.
|
||||
|
||||
|
||||
def traverse_edges(
|
||||
specs: Iterable["spack.spec.Spec"],
|
||||
root: bool = True,
|
||||
order: str = "pre",
|
||||
cover: str = "nodes",
|
||||
direction: str = "children",
|
||||
deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
|
||||
depth: bool = False,
|
||||
key: Key = id,
|
||||
visited: Optional[set] = None,
|
||||
specs,
|
||||
root=True,
|
||||
order="pre",
|
||||
cover="nodes",
|
||||
direction="children",
|
||||
deptype: Union[dt.DepFlag, dt.DepTypes] = "all",
|
||||
depth=False,
|
||||
key=id,
|
||||
visited=None,
|
||||
):
|
||||
"""
|
||||
Generator that yields edges from the DAG, starting from a list of root specs.
|
||||
|
||||
Arguments:
|
||||
|
||||
specs: List of root specs (considered to be depth 0)
|
||||
root: Yield the root nodes themselves
|
||||
order: What order of traversal to use in the DAG. For depth-first
|
||||
specs (list): List of root specs (considered to be depth 0)
|
||||
root (bool): Yield the root nodes themselves
|
||||
order (str): What order of traversal to use in the DAG. For depth-first
|
||||
search this can be ``pre`` or ``post``. For BFS this should be ``breadth``.
|
||||
For topological order use ``topo``
|
||||
cover: Determines how extensively to cover the dag. Possible values:
|
||||
cover (str): Determines how extensively to cover the dag. Possible values:
|
||||
``nodes`` -- Visit each unique node in the dag only once.
|
||||
``edges`` -- If a node has been visited once but is reached along a
|
||||
new path, it's accepted, but not recursively followed. This traverses
|
||||
@@ -457,15 +445,15 @@ def traverse_edges(
|
||||
``paths`` -- Explore every unique path reachable from the root.
|
||||
This descends into visited subtrees and will accept nodes multiple
|
||||
times if they're reachable by multiple paths.
|
||||
direction: ``children`` or ``parents``. If ``children``, does a traversal
|
||||
direction (str): ``children`` or ``parents``. If ``children``, does a traversal
|
||||
of this spec's children. If ``parents``, traverses upwards in the DAG
|
||||
towards the root.
|
||||
deptype: allowed dependency types
|
||||
depth: When ``False``, yield just edges. When ``True`` yield
|
||||
depth (bool): When ``False``, yield just edges. When ``True`` yield
|
||||
the tuple (depth, edge), where depth corresponds to the depth
|
||||
at which edge.spec was discovered.
|
||||
key: function that takes a spec and outputs a key for uniqueness test.
|
||||
visited: a set of nodes not to follow
|
||||
visited (set or None): a set of nodes not to follow
|
||||
|
||||
Returns:
|
||||
A generator that yields ``DependencySpec`` if depth is ``False``
|
||||
@@ -494,29 +482,29 @@ def traverse_edges(
|
||||
elif order == "breadth":
|
||||
return traverse_breadth_first_edges_generator(root_edges, visitor, root, depth)
|
||||
|
||||
raise ValueError(f"Unknown order {order}")
|
||||
raise ValueError("Unknown order {}".format(order))
|
||||
|
||||
|
||||
def traverse_nodes(
|
||||
specs: Iterable["spack.spec.Spec"],
|
||||
root: bool = True,
|
||||
order: str = "pre",
|
||||
cover: str = "nodes",
|
||||
direction: str = "children",
|
||||
deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
|
||||
depth: bool = False,
|
||||
key: Key = id,
|
||||
visited: Optional[set] = None,
|
||||
specs,
|
||||
root=True,
|
||||
order="pre",
|
||||
cover="nodes",
|
||||
direction="children",
|
||||
deptype: Union[dt.DepFlag, dt.DepTypes] = "all",
|
||||
depth=False,
|
||||
key=id,
|
||||
visited=None,
|
||||
):
|
||||
"""
|
||||
Generator that yields specs from the DAG, starting from a list of root specs.
|
||||
|
||||
Arguments:
|
||||
specs: List of root specs (considered to be depth 0)
|
||||
root: Yield the root nodes themselves
|
||||
order: What order of traversal to use in the DAG. For depth-first
|
||||
specs (list): List of root specs (considered to be depth 0)
|
||||
root (bool): Yield the root nodes themselves
|
||||
order (str): What order of traversal to use in the DAG. For depth-first
|
||||
search this can be ``pre`` or ``post``. For BFS this should be ``breadth``.
|
||||
cover: Determines how extensively to cover the dag. Possible values:
|
||||
cover (str): Determines how extensively to cover the dag. Possible values:
|
||||
``nodes`` -- Visit each unique node in the dag only once.
|
||||
``edges`` -- If a node has been visited once but is reached along a
|
||||
new path, it's accepted, but not recursively followed. This traverses
|
||||
@@ -524,15 +512,15 @@ def traverse_nodes(
|
||||
``paths`` -- Explore every unique path reachable from the root.
|
||||
This descends into visited subtrees and will accept nodes multiple
|
||||
times if they're reachable by multiple paths.
|
||||
direction: ``children`` or ``parents``. If ``children``, does a traversal
|
||||
direction (str): ``children`` or ``parents``. If ``children``, does a traversal
|
||||
of this spec's children. If ``parents``, traverses upwards in the DAG
|
||||
towards the root.
|
||||
deptype: allowed dependency types
|
||||
depth: When ``False``, yield just edges. When ``True`` yield
|
||||
depth (bool): When ``False``, yield just edges. When ``True`` yield
|
||||
the tuple ``(depth, edge)``, where depth corresponds to the depth
|
||||
at which ``edge.spec`` was discovered.
|
||||
key: function that takes a spec and outputs a key for uniqueness test.
|
||||
visited: a set of nodes not to follow
|
||||
visited (set or None): a set of nodes not to follow
|
||||
|
||||
Yields:
|
||||
By default :class:`~spack.spec.Spec`, or a tuple ``(depth, Spec)`` if depth is
|
||||
@@ -543,11 +531,7 @@ def traverse_nodes(
|
||||
|
||||
|
||||
def traverse_tree(
|
||||
specs: Iterable["spack.spec.Spec"],
|
||||
cover: str = "nodes",
|
||||
deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
|
||||
key: Key = id,
|
||||
depth_first: bool = True,
|
||||
specs, cover="nodes", deptype: Union[dt.DepFlag, dt.DepTypes] = "all", key=id, depth_first=True
|
||||
):
|
||||
"""
|
||||
Generator that yields ``(depth, DependencySpec)`` tuples in the depth-first
|
||||
@@ -555,8 +539,8 @@ def traverse_tree(
|
||||
|
||||
Arguments:
|
||||
|
||||
specs: List of root specs (considered to be depth 0)
|
||||
cover: Determines how extensively to cover the dag. Possible values:
|
||||
specs (list): List of root specs (considered to be depth 0)
|
||||
cover (str): Determines how extensively to cover the dag. Possible values:
|
||||
``nodes`` -- Visit each unique node in the dag only once.
|
||||
``edges`` -- If a node has been visited once but is reached along a
|
||||
new path, it's accepted, but not recursively followed. This traverses
|
||||
@@ -566,7 +550,7 @@ def traverse_tree(
|
||||
times if they're reachable by multiple paths.
|
||||
deptype: allowed dependency types
|
||||
key: function that takes a spec and outputs a key for uniqueness test.
|
||||
depth_first: Explore the tree in depth-first or breadth-first order.
|
||||
depth_first (bool): Explore the tree in depth-first or breadth-first order.
|
||||
When setting ``depth_first=True`` and ``cover=nodes``, each spec only
|
||||
occurs once at the shallowest level, which is useful when rendering
|
||||
the tree in a terminal.
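A hedged usage sketch of the rendering case just described, again assuming `root` is a concretized Spec:

for depth, edge in traverse_tree([root], cover="nodes", depth_first=True):
    print("  " * depth + edge.spec.name)  # each spec at its shallowest depth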
|
||||
|
||||
@@ -195,14 +195,12 @@ def reproducible_tarfile_from_prefix(
|
||||
file_info = tarfile.TarInfo(path_to_name(entry.path))
|
||||
|
||||
if entry.is_symlink():
|
||||
# strip off long path reg prefix on Windows
|
||||
link_dest = readlink(entry.path)
|
||||
file_info.linkname = link_dest
|
||||
file_info.type = tarfile.SYMTYPE
|
||||
file_info.linkname = readlink(entry.path)
|
||||
# According to POSIX: "the value of the file mode bits returned in the
|
||||
# st_mode field of the stat structure is unspecified." So we set it to
|
||||
# something sensible without lstat'ing the link.
|
||||
file_info.mode = 0o755
|
||||
file_info.type = tarfile.SYMTYPE
|
||||
tar.addfile(file_info)
|
||||
|
||||
elif entry.is_file(follow_symlinks=False):
|
||||
|
||||
@@ -679,8 +679,8 @@ def shell_modifications(
|
||||
for modifier in actions:
|
||||
modifier.execute(new_env)
|
||||
|
||||
if "MANPATH" in new_env and not new_env["MANPATH"].endswith(":"):
|
||||
new_env["MANPATH"] += ":"
|
||||
if "MANPATH" in new_env and not new_env["MANPATH"].endswith(os.pathsep):
|
||||
new_env["MANPATH"] += os.pathsep
|
||||
|
||||
cmds = ""
|
||||
|
||||
|
||||
@@ -128,9 +128,9 @@ def startfile_prefix(prefix: str, compatible_with: str = sys.executable) -> Opti
|
||||
except Exception:
|
||||
accept = lambda path: True
|
||||
|
||||
queue = [(0, prefix)]
|
||||
while queue:
|
||||
depth, path = queue.pop()
|
||||
stack = [(0, prefix)]
|
||||
while stack:
|
||||
depth, path = stack.pop()
|
||||
try:
|
||||
iterator = os.scandir(path)
|
||||
except OSError:
|
||||
@@ -140,7 +140,7 @@ def startfile_prefix(prefix: str, compatible_with: str = sys.executable) -> Opti
|
||||
try:
|
||||
if entry.is_dir(follow_symlinks=True):
|
||||
if depth < 2:
|
||||
queue.append((depth + 1, entry.path))
|
||||
stack.append((depth + 1, entry.path))
|
||||
elif entry.name == "crt1.o" and accept(entry.path):
|
||||
return path
|
||||
except Exception:
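The rename deserves a note: list.pop() removes from the end, so the loop above was always a depth-first walk, and calling the container a stack makes that explicit. A minimal illustration, not Spack code:

from collections import deque

stack = [1, 2, 3]
assert stack.pop() == 3      # list.pop() is LIFO, i.e. depth-first

queue = deque([1, 2, 3])
assert queue.popleft() == 1  # deque.popleft() is FIFO, i.e. breadth-first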
|
||||
|
||||
@@ -20,7 +20,7 @@
|
||||
import functools
|
||||
import io
|
||||
import re
|
||||
from typing import IO, List, Optional
|
||||
from typing import IO, Any, Callable, Dict, List, Optional, Union
|
||||
|
||||
import ruamel.yaml
|
||||
from ruamel.yaml import comments, constructor, emitter, error, representer
|
||||
@@ -233,8 +233,8 @@ def wrapper(data, stream=None, **kwargs):
|
||||
@return_string_when_no_stream
|
||||
def dump(data, stream=None, default_flow_style=False):
|
||||
handler = ConfigYAML(yaml_type=YAMLType.GENERIC_YAML)
|
||||
handler.default_flow_style = default_flow_style
|
||||
handler.width = maxint
|
||||
handler.yaml.default_flow_style = default_flow_style
|
||||
handler.yaml.width = maxint
|
||||
return handler.dump(data, stream=stream)
|
||||
|
||||
|
||||
@@ -493,6 +493,29 @@ def name_mark(name):
|
||||
return error.StringMark(name, None, None, None, None, None)
|
||||
|
||||
|
||||
def anchorify(data: Union[dict, list], identifier: Callable[[Any], str] = repr) -> None:
|
||||
"""Replace identical dict/list branches in tree with references to earlier instances. The YAML
|
||||
serializer generates anchors for them, resulting in smaller YAML files."""
|
||||
anchors: Dict[str, Union[dict, list]] = {}
|
||||
stack: List[Union[dict, list]] = [data]
|
||||
|
||||
while stack:
|
||||
item = stack.pop()
|
||||
|
||||
for key, value in item.items() if isinstance(item, dict) else enumerate(item):
|
||||
if not isinstance(value, (dict, list)):
|
||||
continue
|
||||
|
||||
id = identifier(value)
|
||||
anchor = anchors.get(id)
|
||||
|
||||
if anchor is None:
|
||||
anchors[id] = value
|
||||
stack.append(value)
|
||||
else:
|
||||
item[key] = anchor # replace with reference
|
||||
|
||||
|
||||
class SpackYAMLError(spack.error.SpackError):
|
||||
"""Raised when there are issues with YAML parsing."""
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
stages: [ "generate", "build", "publish" ]
|
||||
stages: [ "generate", "build" ]
|
||||
|
||||
variables:
|
||||
SPACK_DISABLE_LOCAL_CONFIG: "1"
|
||||
@@ -187,7 +187,7 @@ default:
|
||||
|
||||
.generate-x86_64:
|
||||
extends: [ ".generate-base" ]
|
||||
tags: ["spack", "public", "medium", "x86_64"]
|
||||
tags: ["spack", "public", "medium", "x86_64_v3"]
|
||||
|
||||
.generate-aarch64:
|
||||
extends: [ ".generate-base" ]
|
||||
@@ -259,36 +259,6 @@ default:
|
||||
extends: [ ".base-job" ]
|
||||
stage: build
|
||||
|
||||
protected-publish:
|
||||
# Copy binaries from stack-specific mirrors to a root mirror
|
||||
stage: publish
|
||||
only:
|
||||
- /^develop$/
|
||||
- /^releases\/v.*/
|
||||
- /^v.*/
|
||||
- /^develop-[\d]{4}-[\d]{2}-[\d]{2}$/
|
||||
image: "ghcr.io/spack/python-aws-bash:0.0.1"
|
||||
tags: ["spack", "public", "medium", "aws", "x86_64"]
|
||||
retry:
|
||||
max: 2
|
||||
when: ["runner_system_failure", "stuck_or_timeout_failure"]
|
||||
variables:
|
||||
SPACK_COPY_BUILDCACHE: "${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}"
|
||||
SPACK_PIPELINE_TYPE: "spack_protected_branch"
|
||||
KUBERNETES_CPU_REQUEST: 4000m
|
||||
KUBERNETES_MEMORY_REQUEST: 16G
|
||||
script:
|
||||
- . "./share/spack/setup-env.sh"
|
||||
- spack --version
|
||||
- export COPY_SPECS_DIR=${CI_PROJECT_DIR}/jobs_scratch_dir/specs_to_copy
|
||||
- spack buildcache sync --manifest-glob "${COPY_SPECS_DIR}/*.json"
|
||||
- curl -fLsS https://spack.github.io/keys/spack-public-binary-key.pub -o /tmp/spack-public-binary-key.pub
|
||||
- aws s3 cp /tmp/spack-public-binary-key.pub "${SPACK_COPY_BUILDCACHE}/build_cache/_pgp/spack-public-binary-key.pub"
|
||||
- spack buildcache update-index --keys "${SPACK_COPY_BUILDCACHE}"
|
||||
id_tokens:
|
||||
GITLAB_OIDC_TOKEN:
|
||||
aud: "protected_binary_mirror"
|
||||
|
||||
########################################
|
||||
# TEMPLATE FOR ADDING ANOTHER PIPELINE
|
||||
########################################
|
||||
@@ -406,7 +376,7 @@ e4s-neoverse_v1-build:
|
||||
|
||||
e4s-rocm-external-generate:
|
||||
extends: [ ".e4s-rocm-external", ".generate-x86_64"]
|
||||
image: ecpe4s/ubuntu22.04-runner-amd64-gcc-11.4-rocm5.7.1:2024.03.01
|
||||
image: ecpe4s/ubuntu22.04-runner-amd64-gcc-11.4-rocm6.1.1:2024.06.23
|
||||
|
||||
e4s-rocm-external-build:
|
||||
extends: [ ".e4s-rocm-external", ".build" ]
|
||||
@@ -452,7 +422,7 @@ e4s-rocm-external-build:
|
||||
|
||||
e4s-oneapi-generate:
|
||||
extends: [ ".e4s-oneapi", ".generate-x86_64"]
|
||||
image: ghcr.io/spack/ubuntu22.04-runner-amd64-oneapi-2024.0.0:2024.01.16b
|
||||
image: ecpe4s/ubuntu22.04-runner-amd64-oneapi-2024.2:2024.06.21
|
||||
|
||||
e4s-oneapi-build:
|
||||
extends: [ ".e4s-oneapi", ".build" ]
|
||||
@@ -513,28 +483,6 @@ build_systems-build:
|
||||
- artifacts: True
|
||||
job: build_systems-generate
|
||||
|
||||
###########################################
|
||||
# Build tests for different developer tools
|
||||
###########################################
|
||||
.developer-tools:
|
||||
extends: [ ".linux_x86_64_v3" ]
|
||||
variables:
|
||||
SPACK_CI_STACK_NAME: developer-tools
|
||||
|
||||
developer-tools-generate:
|
||||
extends: [ ".developer-tools", ".generate-x86_64"]
|
||||
|
||||
developer-tools-build:
|
||||
extends: [ ".developer-tools", ".build" ]
|
||||
trigger:
|
||||
include:
|
||||
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
|
||||
job: developer-tools-generate
|
||||
strategy: depend
|
||||
needs:
|
||||
- artifacts: True
|
||||
job: developer-tools-generate
|
||||
|
||||
###########################################
|
||||
# Build tests for different developer tools
|
||||
# manylinux2014
|
||||
|
||||
@@ -4,22 +4,22 @@ spack:
|
||||
all:
|
||||
providers:
|
||||
blas:
|
||||
- openblas
|
||||
- openblas
|
||||
mkl:
|
||||
- intel-oneapi-mkl
|
||||
- intel-oneapi-mkl
|
||||
mpi:
|
||||
- openmpi
|
||||
- mpich
|
||||
- openmpi
|
||||
- mpich
|
||||
variants: +mpi
|
||||
tbb:
|
||||
require: "intel-tbb"
|
||||
require: intel-tbb
|
||||
binutils:
|
||||
variants: +ld +gold +headers +libiberty ~nls
|
||||
version:
|
||||
- 2.36.1
|
||||
- 2.36.1
|
||||
doxygen:
|
||||
version:
|
||||
- 1.8.20
|
||||
- 1.8.20
|
||||
elfutils:
|
||||
variants: ~nls
|
||||
hdf5:
|
||||
@@ -39,13 +39,17 @@ spack:
|
||||
openmpi:
|
||||
variants: fabrics=ofi +legacylaunchers
|
||||
openturns:
|
||||
version: [1.18]
|
||||
version:
|
||||
- '1.18'
|
||||
relion:
|
||||
variants: ~mklfft
|
||||
# texlive:
|
||||
# version: [20210325]
|
||||
trilinos:
|
||||
variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
|
||||
variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext
|
||||
+ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
|
||||
+nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
|
||||
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
|
||||
xz:
|
||||
variants: +pic
|
||||
|
||||
@@ -108,8 +112,8 @@ spack:
|
||||
- '%gcc@7.3.1'
|
||||
|
||||
- target:
|
||||
- 'target=aarch64'
|
||||
- 'target=neoverse_n1'
|
||||
- target=aarch64
|
||||
- target=neoverse_n1
|
||||
|
||||
|
||||
specs:
|
||||
@@ -132,7 +136,7 @@ spack:
|
||||
ci:
|
||||
pipeline-gen:
|
||||
- build-job:
|
||||
image: { "name": "ghcr.io/spack/e4s-amazonlinux-2:v2023-03-09", "entrypoint": [""] }
|
||||
image: {name: ghcr.io/spack/e4s-amazonlinux-2:v2023-03-09, entrypoint: ['']}
|
||||
|
||||
cdash:
|
||||
build-group: AWS Packages
|
||||
|
||||
@@ -4,22 +4,22 @@ spack:
|
||||
all:
|
||||
providers:
|
||||
blas:
|
||||
- openblas
|
||||
- openblas
|
||||
mkl:
|
||||
- intel-oneapi-mkl
|
||||
- intel-oneapi-mkl
|
||||
mpi:
|
||||
- openmpi
|
||||
- mpich
|
||||
- openmpi
|
||||
- mpich
|
||||
variants: +mpi
|
||||
tbb:
|
||||
require: "intel-tbb"
|
||||
require: intel-tbb
|
||||
binutils:
|
||||
variants: +ld +gold +headers +libiberty ~nls
|
||||
version:
|
||||
- 2.36.1
|
||||
- 2.36.1
|
||||
doxygen:
|
||||
version:
|
||||
- 1.8.20
|
||||
- 1.8.20
|
||||
elfutils:
|
||||
variants: ~nls
|
||||
hdf5:
|
||||
@@ -39,13 +39,17 @@ spack:
|
||||
openmpi:
|
||||
variants: fabrics=ofi +legacylaunchers
|
||||
openturns:
|
||||
version: [1.18]
|
||||
version:
|
||||
- '1.18'
|
||||
relion:
|
||||
variants: ~mklfft
|
||||
# texlive:
|
||||
# version: [20210325]
|
||||
trilinos:
|
||||
variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
|
||||
variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext
|
||||
+ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
|
||||
+nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
|
||||
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
|
||||
xz:
|
||||
variants: +pic
|
||||
|
||||
@@ -115,7 +119,7 @@ spack:
|
||||
- '%gcc@7.3.1'
|
||||
|
||||
- target:
|
||||
- 'target=x86_64_v3'
|
||||
- target=x86_64_v3
|
||||
|
||||
|
||||
specs:
|
||||
@@ -143,7 +147,7 @@ spack:
|
||||
ci:
|
||||
pipeline-gen:
|
||||
- build-job:
|
||||
image: { "name": "ghcr.io/spack/e4s-amazonlinux-2:v2023-03-09", "entrypoint": [""] }
|
||||
image: {name: ghcr.io/spack/e4s-amazonlinux-2:v2023-03-09, entrypoint: ['']}
|
||||
|
||||
cdash:
|
||||
build-group: AWS Packages
|
||||
|
||||
@@ -19,8 +19,6 @@ packages:
|
||||
- "+intel_provided_gcc ^intel-oneapi-mkl target=x86_64_v4"
|
||||
- "+intel_provided_gcc ^intel-oneapi-mkl target=x86_64_v3"
|
||||
when: "%intel"
|
||||
intel-mpi:
|
||||
variants: +external-libfabric
|
||||
intel-oneapi-compilers:
|
||||
require: "intel-oneapi-compilers %gcc target=x86_64_v3"
|
||||
intel-oneapi-mpi:
|
||||
@@ -110,12 +108,15 @@ packages:
|
||||
read: world
|
||||
write: user
|
||||
providers:
|
||||
blas: [intel-oneapi-mkl, intel-mkl]
|
||||
daal: [intel-oneapi-dal, intel-daal]
|
||||
fftw-api: [intel-oneapi-mkl, intel-mkl]
|
||||
ipp: [intel-oneapi-ipp, intel-ipp]
|
||||
lapack: [intel-oneapi-mkl, intel-mkl]
|
||||
mkl: [intel-oneapi-mkl, intel-mkl]
|
||||
blas: [intel-oneapi-mkl]
|
||||
daal: [intel-oneapi-dal]
|
||||
fftw-api: [intel-oneapi-mkl]
|
||||
ipp: [intel-oneapi-ipp]
|
||||
lapack: [intel-oneapi-mkl]
|
||||
mkl: [intel-oneapi-mkl]
|
||||
mpi: [intel-oneapi-mpi, openmpi, mpich]
|
||||
tbb: [intel-oneapi-tbb, intel-tbb]
|
||||
scalapack: [intel-oneapi-mkl, intel-mkl]
|
||||
scalapack: [intel-oneapi-mkl]
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -1,69 +0,0 @@
|
||||
spack:
|
||||
view: false
|
||||
packages:
|
||||
all:
|
||||
require: target=x86_64_v3
|
||||
concretizer:
|
||||
unify: true
|
||||
definitions:
|
||||
- default_specs:
|
||||
# editors
|
||||
- neovim~no_luajit
|
||||
- py-pynvim
|
||||
- emacs@29.1+json+native+treesitter # note, pulls in gcc
|
||||
# - tree-sitter is a dep, should also have cli but no package
|
||||
- nano # just in case
|
||||
# tags and scope search helpers
|
||||
- universal-ctags # only maintained ctags, works better with c++
|
||||
- direnv
|
||||
# runtimes and compilers
|
||||
- python
|
||||
- llvm+link_llvm_dylib~lld~lldb~polly+python build_type=MinSizeRel # for clangd, clang-format
|
||||
- node-js # for editor plugins etc., pyright language server
|
||||
- npm
|
||||
- go # to build fzf, gh, hub
|
||||
- rust+dev # fd, ripgrep, hyperfine, exa, rust-analyzer
|
||||
- binutils+ld+gold+plugins # support linking with built gcc
|
||||
# styling and lints
|
||||
- astyle
|
||||
- cppcheck
|
||||
- uncrustify
|
||||
- py-fprettify
|
||||
- py-fortran-language-server
|
||||
- py-python-lsp-server
|
||||
# cli dev tools
|
||||
- ripgrep
|
||||
- gh
|
||||
- fd
|
||||
- bfs
|
||||
- fzf
|
||||
- tree
|
||||
- jq
|
||||
- py-yq
|
||||
- hub
|
||||
- ncdu
|
||||
- eza
|
||||
- lsd
|
||||
- hyperfine
|
||||
- htop
|
||||
- tmux
|
||||
- ccache
|
||||
# ensure we can use a jobserver build and do this fast
|
||||
- gmake
|
||||
- ninja # should be @kitware, can't be because of meson requirement
|
||||
- "openssl certs=system" # must be this, system external does not work
|
||||
- arch:
|
||||
- '%gcc target=x86_64_v3'
|
||||
|
||||
specs:
|
||||
- matrix:
|
||||
- - $default_specs
|
||||
- - $arch
|
||||
|
||||
ci:
|
||||
pipeline-gen:
|
||||
- build-job:
|
||||
image: "ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01"
|
||||
|
||||
cdash:
|
||||
build-group: Developer Tools
|
||||
@@ -34,7 +34,7 @@ spack:
|
||||
mpi:
|
||||
require: mpich
|
||||
mpich:
|
||||
require: '~wrapperrpath ~hwloc'
|
||||
require: '~wrapperrpath ~hwloc %gcc target=neoverse_v2'
|
||||
tbb:
|
||||
require: intel-tbb
|
||||
boost:
|
||||
@@ -43,11 +43,13 @@ spack:
|
||||
+regex +serialization +shared +signals +stacktrace +system +test +thread +timer
|
||||
cxxstd=17 visibility=global
|
||||
libffi:
|
||||
require: "@3.4.4"
|
||||
require: "@3.4.4 %gcc target=neoverse_v2"
|
||||
vtk-m:
|
||||
require: "+examples"
|
||||
require: "+examples %gcc target=neoverse_v2"
|
||||
cuda:
|
||||
version: [11.8.0]
|
||||
paraview:
|
||||
require: "+examples %gcc target=neoverse_v2"
|
||||
|
||||
specs:
|
||||
# CPU
|
||||
@@ -180,27 +182,6 @@ spack:
|
||||
# - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp # py-numcodecs@0.7.3: gcc: error: unrecognized command-line option '-mno-sse2'
|
||||
# - variorum # variorum: https://github.com/spack/spack/issues/38786
|
||||
|
||||
# PYTHON PACKAGES
|
||||
# - opencv +python3
|
||||
# - py-horovod
|
||||
# - py-jax
|
||||
# - py-jupyterlab
|
||||
# - py-matplotlib
|
||||
# - py-mpi4py
|
||||
# - py-notebook
|
||||
# - py-numba
|
||||
# - py-numpy
|
||||
# - py-openai
|
||||
# - py-pandas
|
||||
# - py-plotly
|
||||
# - py-pooch
|
||||
# - py-pytest
|
||||
# - py-scikit-learn
|
||||
# - py-scipy
|
||||
# - py-seaborn
|
||||
# - py-tensorflow
|
||||
# - py-torch
|
||||
|
||||
# CUDA NOARCH
|
||||
- flux-core +cuda
|
||||
- hpctoolkit +cuda
|
||||
@@ -216,7 +197,6 @@ spack:
|
||||
- cabana +cuda cuda_arch=90 ^kokkos +wrapper +cuda_lambda +cuda cuda_arch=90
|
||||
- caliper +cuda cuda_arch=90
|
||||
- chai +cuda cuda_arch=90 ^umpire ~shared
|
||||
# - cp2k +mpi +cuda cuda_arch=90 # cp2k: cp2k only supports cuda_arch ('35', '37', '60', '70', '80')
|
||||
- flecsi +cuda cuda_arch=90
|
||||
- ginkgo +cuda cuda_arch=90
|
||||
- gromacs +cuda cuda_arch=90
|
||||
@@ -238,12 +218,13 @@ spack:
|
||||
- umpire ~shared +cuda cuda_arch=90
|
||||
# INCLUDED IN ECP DAV CUDA
|
||||
- adios2 +cuda cuda_arch=90
|
||||
# - ascent +cuda cuda_arch=90 # ascent: https://github.com/spack/spack/issues/38045
|
||||
# - paraview +cuda cuda_arch=90 # paraview: InstallError: Incompatible cuda_arch=90
|
||||
- vtk-m +cuda cuda_arch=90
|
||||
- zfp +cuda cuda_arch=90
|
||||
# --
|
||||
# - ascent +cuda cuda_arch=90 # ascent: https://github.com/spack/spack/issues/38045
|
||||
# - axom +cuda cuda_arch=90 # axom: https://github.com/spack/spack/issues/29520
|
||||
# - cp2k +mpi +cuda cuda_arch=90 # cp2k: cp2k only supports cuda_arch ('35', '37', '60', '70', '80')
|
||||
# - cusz +cuda cuda_arch=90 # cusz: https://github.com/spack/spack/issues/38787
|
||||
# - dealii +cuda cuda_arch=90 # dealii: https://github.com/spack/spack/issues/39532
|
||||
# - ecp-data-vis-sdk +adios2 +hdf5 +vtkm +zfp +paraview +cuda cuda_arch=90 # embree: https://github.com/spack/spack/issues/39534
|
||||
|
||||
@@ -34,7 +34,7 @@ spack:
|
||||
mpi:
|
||||
require: mpich
|
||||
mpich:
|
||||
require: '~wrapperrpath ~hwloc'
|
||||
require: '~wrapperrpath ~hwloc %gcc target=neoverse_v1'
|
||||
tbb:
|
||||
require: intel-tbb
|
||||
boost:
|
||||
@@ -43,9 +43,11 @@ spack:
|
||||
+regex +serialization +shared +signals +stacktrace +system +test +thread +timer
|
||||
cxxstd=17 visibility=global
|
||||
libffi:
|
||||
require: "@3.4.4"
|
||||
require: "@3.4.4 %gcc target=neoverse_v1"
|
||||
vtk-m:
|
||||
require: "+examples"
|
||||
require: "+examples %gcc target=neoverse_v1"
|
||||
paraview:
|
||||
require: "+examples %gcc target=neoverse_v1"
|
||||
cuda:
|
||||
version: [11.8.0]
|
||||
|
||||
@@ -57,7 +59,6 @@ spack:
|
||||
- amrex
|
||||
- arborx
|
||||
- argobots
|
||||
- ascent # ecp dav
|
||||
- axom
|
||||
- bolt
|
||||
- boost
|
||||
@@ -158,6 +159,7 @@ spack:
|
||||
- xyce +mpi +shared +pymi +pymi_static_tpls
|
||||
# INCLUDED IN ECP DAV CPU
|
||||
- adios2
|
||||
- ascent
|
||||
- darshan-runtime
|
||||
- darshan-util
|
||||
- faodel
|
||||
@@ -169,16 +171,16 @@ spack:
|
||||
- sz
|
||||
- unifyfs
|
||||
- veloc
|
||||
# - visit # silo: https://github.com/spack/spack/issues/39538
|
||||
# - visit # silo: https://github.com/spack/spack/issues/39538
|
||||
- vtk-m
|
||||
- zfp
|
||||
# --
|
||||
# - bricks ~cuda # not respecting target=aarch64?
|
||||
# - dealii # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'.
|
||||
# - geopm # geopm: https://github.com/spack/spack/issues/38795
|
||||
# - glvis # glvis: https://github.com/spack/spack/issues/42839
|
||||
# - bricks ~cuda # not respecting target=aarch64?
|
||||
# - dealii # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'.
|
||||
# - geopm # geopm: https://github.com/spack/spack/issues/38795
|
||||
# - glvis # glvis: https://github.com/spack/spack/issues/42839
|
||||
# - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp # py-numcodecs@0.7.3: gcc: error: unrecognized command-line option '-mno-sse2'
|
||||
# - variorum # variorum: https://github.com/spack/spack/issues/38786
|
||||
# - variorum # variorum: https://github.com/spack/spack/issues/38786
|
||||
|
||||
# PYTHON PACKAGES
|
||||
- opencv +python3
|
||||
@@ -207,8 +209,8 @@ spack:
|
||||
- papi +cuda
|
||||
- tau +mpi +cuda +syscall
|
||||
# --
|
||||
# - bricks +cuda # not respecting target=aarch64?
|
||||
# - legion +cuda # legion: needs NVIDIA driver
|
||||
# - bricks +cuda # not respecting target=aarch64?
|
||||
# - legion +cuda # legion: needs NVIDIA driver
|
||||
|
||||
# CUDA 75
|
||||
- amrex +cuda cuda_arch=75
|
||||
@@ -216,7 +218,6 @@ spack:
|
||||
- cabana +cuda cuda_arch=75 ^kokkos +wrapper +cuda_lambda +cuda cuda_arch=75
|
||||
- caliper +cuda cuda_arch=75
|
||||
- chai +cuda cuda_arch=75 ^umpire ~shared
|
||||
# - cp2k +mpi +cuda cuda_arch=75 # cp2k: cp2k only supports cuda_arch ('35', '37', '60', '70', '80')
|
||||
- flecsi +cuda cuda_arch=75
|
||||
- ginkgo +cuda cuda_arch=75
|
||||
- gromacs +cuda cuda_arch=75
|
||||
@@ -241,21 +242,22 @@ spack:
|
||||
- umpire ~shared +cuda cuda_arch=75
|
||||
# INCLUDED IN ECP DAV CUDA
|
||||
- adios2 +cuda cuda_arch=75
|
||||
# - ascent +cuda cuda_arch=75 # ascent: https://github.com/spack/spack/issues/38045
|
||||
- vtk-m +cuda cuda_arch=75
|
||||
- zfp +cuda cuda_arch=75
|
||||
# --
|
||||
# - ascent +cuda cuda_arch=75 # ascent: https://github.com/spack/spack/issues/38045
|
||||
# - axom +cuda cuda_arch=75 # axom: https://github.com/spack/spack/issues/29520
|
||||
# - cusz +cuda cuda_arch=75 # cusz: https://github.com/spack/spack/issues/38787
|
||||
# - dealii +cuda cuda_arch=75 # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'.
|
||||
# - paraview +cuda cuda_arch=75 # Error building some cuda components in paraview
|
||||
# - axom +cuda cuda_arch=75 # axom: https://github.com/spack/spack/issues/29520
|
||||
# - cp2k +mpi +cuda cuda_arch=75 # cp2k: cp2k only supports cuda_arch ('35', '37', '60', '70', '80')
|
||||
# - cusz +cuda cuda_arch=75 # cusz: https://github.com/spack/spack/issues/38787
|
||||
# - dealii +cuda cuda_arch=75 # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'.
|
||||
# - ecp-data-vis-sdk +adios2 +hdf5 +vtkm +zfp +paraview +cuda cuda_arch=75 # embree: https://github.com/spack/spack/issues/39534
|
||||
# - lammps +cuda cuda_arch=75 # lammps: needs NVIDIA driver
|
||||
# - lbann +cuda cuda_arch=75 # lbann: https://github.com/spack/spack/issues/38788
|
||||
# - lammps +cuda cuda_arch=75 # lammps: needs NVIDIA driver
|
||||
# - lbann +cuda cuda_arch=75 # lbann: https://github.com/spack/spack/issues/38788
|
||||
# - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf ~cusz +mgard +cuda cuda_arch=75 # libpressio: CMake Error at CMakeLists.txt:498 (find_library): Could not find CUFile_LIBRARY using the following names: cufile ; +cusz: https://github.com/spack/spack/issues/38787
|
||||
# - py-torch +cuda cuda_arch=75 # skipped, installed by other means
|
||||
# - slepc +cuda cuda_arch=75 # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'.
|
||||
# - upcxx +cuda cuda_arch=75 # upcxx: needs NVIDIA driver
|
||||
# - paraview +cuda cuda_arch=75 # Error building some cuda components in paraview
|
||||
# - py-torch +cuda cuda_arch=75 # skipped, installed by other means
|
||||
# - slepc +cuda cuda_arch=75 # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'.
|
||||
# - upcxx +cuda cuda_arch=75 # upcxx: needs NVIDIA driver
|
||||
|
||||
# CUDA 80
|
||||
- amrex +cuda cuda_arch=80
|
||||
@@ -263,7 +265,6 @@ spack:
|
||||
- cabana +cuda cuda_arch=80 ^kokkos +wrapper +cuda_lambda +cuda cuda_arch=80
|
||||
- caliper +cuda cuda_arch=80
|
||||
- chai +cuda cuda_arch=80 ^umpire ~shared
|
||||
# - cp2k +mpi +cuda cuda_arch=80 # cp2k: Error: KeyError: 'Point environment variable LIBSMM_PATH to the absolute path of the libsmm.a file'
|
||||
- flecsi +cuda cuda_arch=80
|
||||
- ginkgo +cuda cuda_arch=80
|
||||
- gromacs +cuda cuda_arch=80
|
||||
@@ -287,22 +288,23 @@ spack:
|
||||
- trilinos +cuda cuda_arch=80
|
||||
- umpire ~shared +cuda cuda_arch=80
|
||||
# INCLUDED IN ECP DAV CUDA
|
||||
# - ascent +cuda cuda_arch=80 # ascent: https://github.com/spack/spack/issues/38045
|
||||
- adios2 +cuda cuda_arch=80
|
||||
- vtk-m +cuda cuda_arch=80
|
||||
- zfp +cuda cuda_arch=80
|
||||
# --
|
||||
# - ascent +cuda cuda_arch=80 # ascent: https://github.com/spack/spack/issues/38045
|
||||
# - axom +cuda cuda_arch=80 # axom: https://github.com/spack/spack/issues/29520
|
||||
# - cusz +cuda cuda_arch=80 # cusz: https://github.com/spack/spack/issues/38787
|
||||
# - dealii +cuda cuda_arch=80 # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'.
|
||||
# - paraview +cuda cuda_arch=80 # Error building some cuda components in paraview
|
||||
# - axom +cuda cuda_arch=80 # axom: https://github.com/spack/spack/issues/29520
|
||||
# - cp2k +mpi +cuda cuda_arch=80 # cp2k: Error: KeyError: 'Point environment variable LIBSMM_PATH to the absolute path of the libsmm.a file'
|
||||
# - cusz +cuda cuda_arch=80 # cusz: https://github.com/spack/spack/issues/38787
|
||||
# - dealii +cuda cuda_arch=80 # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'.
|
||||
# - ecp-data-vis-sdk +adios2 +hdf5 +vtkm +zfp +paraview +cuda cuda_arch=80 # embree: https://github.com/spack/spack/issues/39534
|
||||
# - lammps +cuda cuda_arch=80 # lammps: needs NVIDIA driver
|
||||
# - lbann +cuda cuda_arch=80 # lbann: https://github.com/spack/spack/issues/38788
|
||||
# - lammps +cuda cuda_arch=80 # lammps: needs NVIDIA driver
|
||||
# - lbann +cuda cuda_arch=80 # lbann: https://github.com/spack/spack/issues/38788
|
||||
# - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf ~cusz +mgard +cuda cuda_arch=80 # libpressio: CMake Error at CMakeLists.txt:498 (find_library): Could not find CUFile_LIBRARY using the following names: cufile ; +cusz: https://github.com/spack/spack/issues/38787
|
||||
# - py-torch +cuda cuda_arch=80 # skipped, installed by other means
|
||||
# - slepc +cuda cuda_arch=80 # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'.
|
||||
# - upcxx +cuda cuda_arch=80 # upcxx: needs NVIDIA driver
|
||||
# - paraview +cuda cuda_arch=80 # Error building some cuda componets in paraview
|
||||
# - py-torch +cuda cuda_arch=80 # skipped, installed by other means
|
||||
# - slepc +cuda cuda_arch=80 # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'.
|
||||
# - upcxx +cuda cuda_arch=80 # upcxx: needs NVIDIA driver
|
||||
|
||||
# CUDA 90
- amrex +cuda cuda_arch=90
@@ -310,7 +312,6 @@ spack:
- cabana +cuda cuda_arch=90 ^kokkos +wrapper +cuda_lambda +cuda cuda_arch=90
- caliper +cuda cuda_arch=90
- chai +cuda cuda_arch=90 ^umpire ~shared
# - cp2k +mpi +cuda cuda_arch=90 # cp2k: cp2k only supports cuda_arch ('35', '37', '60', '70', '80')
- flecsi +cuda cuda_arch=90
- ginkgo +cuda cuda_arch=90
- gromacs +cuda cuda_arch=90
@@ -332,24 +333,25 @@ spack:
- umpire ~shared +cuda cuda_arch=90
# INCLUDED IN ECP DAV CUDA
- adios2 +cuda cuda_arch=90
# - paraview +cuda cuda_arch=90 # paraview: InstallError: Incompatible cuda_arch=90
- vtk-m +cuda cuda_arch=90
- zfp +cuda cuda_arch=90
# --
# - ascent +cuda cuda_arch=90 # ascent: https://github.com/spack/spack/issues/38045
# - axom +cuda cuda_arch=90 # axom: https://github.com/spack/spack/issues/29520
# - cusz +cuda cuda_arch=90 # cusz: https://github.com/spack/spack/issues/38787
# - dealii +cuda cuda_arch=90 # dealii: https://github.com/spack/spack/issues/39532
# - ecp-data-vis-sdk +adios2 +hdf5 +vtkm +zfp +paraview +cuda cuda_arch=90 # embree: https://github.com/spack/spack/issues/39534
# - hypre +cuda cuda_arch=90 # concretizer: hypre +cuda requires cuda@:11, but cuda_arch=90 requires cuda@12:
# - lammps +cuda cuda_arch=90 # lammps: needs NVIDIA driver
# - lbann +cuda cuda_arch=90 # concretizer: Cannot select a single "version" for package "lbann"
# - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf ~cusz +mgard +cuda cuda_arch=90 # libpressio: CMake Error at CMakeLists.txt:498 (find_library): Could not find CUFile_LIBRARY using the following names: cufile ; +cusz: https://github.com/spack/spack/issues/38787
# - omega-h +cuda cuda_arch=90 # omega-h: https://github.com/spack/spack/issues/39535
# - py-torch +cuda cuda_arch=90 # skipped, installed by other means
# - slepc +cuda cuda_arch=90 # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'.
# - tasmanian +cuda cuda_arch=90 # tasmanian: conflicts with cuda@12
# - upcxx +cuda cuda_arch=90 # upcxx: needs NVIDIA driver

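Note that the hypre and tasmanian entries above are blocked by CUDA version bounds rather than compile failures: cuda_arch=90 implies cuda@12:, which those packages do not yet accept. A minimal sketch of how that constraint plays out, assuming a hypothetical stand-alone environment (the cuda pin below is illustrative and not part of this change):

spack:
  packages:
    cuda:
      # assumption: pin CUDA 12 so that cuda_arch=90 can concretize at all
      require: "@12:"
  specs:
  - amrex +cuda cuda_arch=90     # fine: amrex accepts cuda@12
  # - hypre +cuda cuda_arch=90   # still impossible: hypre +cuda needs cuda@:11
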
ci:
  pipeline-gen:

@@ -19,9 +19,9 @@ spack:
elfutils:
  variants: ~nls
gcc-runtime:
  require: "%gcc"
  require: "%gcc target=x86_64_v3"
hdf5:
  require: "%gcc"
  require: "%gcc target=x86_64_v3"
  variants: +fortran +hl +shared
libfabric:
  variants: fabrics=sockets,tcp,udp,rxm
@@ -39,33 +39,36 @@ spack:
xz:
  variants: +pic
mpi:
  require: 'mpich@4:'
  require: 'mpich@4: target=x86_64_v3'
mpich:
  require: '~wrapperrpath ~hwloc'
  require: '~wrapperrpath ~hwloc target=x86_64_v3'
unzip:
  require: '%gcc'
  require: '%gcc target=x86_64_v3'
binutils:
  require: '%gcc'
  require: '%gcc target=x86_64_v3'
  variants: +ld +gold +headers +libiberty ~nls
llvm:
  require: '%gcc'
  require: '%gcc target=x86_64_v3'
ruby:
  require: '%gcc'
  require: '%gcc target=x86_64_v3'
rust:
  require: '%gcc'
  require: '%gcc target=x86_64_v3'
krb5:
  require: '%gcc'
  require: '%gcc target=x86_64_v3'
papi:
  require: '%gcc'
  require: '%gcc target=x86_64_v3'
openssh:
  require: '%gcc'
  require: '%gcc target=x86_64_v3'
dyninst:
  require: "%gcc"
  require: "%gcc target=x86_64_v3"
bison:
  require: '%gcc'
  require: '%gcc target=x86_64_v3'
paraview:
  require: "+examples %oneapi target=x86_64_v3"

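Nearly every entry in the hunk above gains the same target=x86_64_v3 pin. A minimal sketch of an alternative that states the target once, assuming Spack's packages:all:require blanket syntax (this is not what the change does, just the consolidated form):

spack:
  packages:
    all:
      # assumption: one blanket target; per-package entries then carry only
      # what actually differs, e.g. paraview's %oneapi toolchain
      require: "target=x86_64_v3"
    paraview:
      require: "+examples %oneapi"
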
specs:
# CPU
- aml
- adios
- amrex
- arborx
@@ -79,7 +82,6 @@ spack:
- chai
- charliecloud
- conduit
# - cp2k +mpi # dbcsr
- datatransferkit
- drishti
- exaworks
@@ -105,7 +107,7 @@ spack:
- kokkos-kernels +openmp
- laghos
- lammps
- lbann
# - lbann # 2024.2 internal compiler error
- legion
- libnrm
- libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp
@@ -122,13 +124,14 @@ spack:
- netlib-scalapack
- nrm
- omega-h
- openfoam
- openmpi
- papi
- papyrus
- parsec ~cuda
- petsc
- phist
- plasma
# - plasma # 2024.2 internal compiler error
- plumed
- precice
- pruners-ninja
@@ -160,7 +163,7 @@ spack:
- wannier90
- xyce +mpi +shared +pymi +pymi_static_tpls
# INCLUDED IN ECP DAV CPU
- adios2 # mgard: mgard.tpp:63:48: error: non-constant-expression cannot be narrowed from type 'int' to 'unsigned long' in initializer list [-Wc++11-narrowing]
- adios2
- ascent
- darshan-runtime
- darshan-util
@@ -178,27 +181,28 @@ spack:
- zfp
# --
# - alquimia # pflotran: https://github.com/spack/spack/issues/39474
# - bricks ~cuda # bricks: /opt/intel/oneapi/compiler/2024.0/bin/sycl-post-link: error while loading shared libraries: libonnxruntime.1.12.22.721.so: cannot open shared object file: No such file or directory
# - dealii # dealii: https://github.com/spack/spack/issues/39482
# - dxt-explorer # r: https://github.com/spack/spack/issues/40257
# - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc +visit +vtkm +zfp # embree: CMake Error at CMakeLists.txt:215 (MESSAGE): Unsupported compiler: IntelLLVM; qt: qtbase/src/corelib/global/qendian.h:333:54: error: incomplete type 'std::numeric_limits' used in nested name specifier
# - geopm # geopm issue: https://github.com/spack/spack/issues/38795
# - glvis ^llvm # glvis: https://github.com/spack/spack/issues/42839
# - hpctoolkit # dyninst@12.3.0%gcc: /usr/bin/ld: libiberty/./d-demangle.c:142: undefined reference to `_intel_fast_memcpy'; can't mix intel-tbb@%oneapi with dyninst%gcc
# - mgard +serial +openmp +timing +unstructured ~cuda # mgard: mgard.tpp:63:48: error: non-constant-expression cannot be narrowed from type 'int' to 'unsigned long' in initializer list [-Wc++11-narrowing]
# - openfoam # cgal: https://github.com/spack/spack/issues/39481
# - openpmd-api # mgard: mgard.tpp:63:48: error: non-constant-expression cannot be narrowed from type 'int' to 'unsigned long' in initializer list [-Wc++11-narrowing]
# - pdt # pdt: pdbType.cc:193:21: warning: ISO C++11 does not allow conversion from string literal to 'char *' [-Wwritable-strings]
# - quantum-espresso # quantum-espresso@7.2 /i3fqdx5: warning: <unknown>:0:0: loop not unroll-and-jammed: the optimizer was unable to perform the requested transformation; the transformation might be disabled or specified as part of an unsupported transformation ordering
# - swig@4.0.2-fortran # ?
# - tau +mpi +python +syscall # pdt: pdbType.cc:193:21: warning: ISO C++11 does not allow conversion from string literal to 'char *' [-Wwritable-strings]
# - upcxx # upcxx: /opt/intel/oneapi/mpi/2021.10.0//libfabric/bin/fi_info: error while loading shared libraries: libfabric.so.1: cannot open shared object file: No such file or directory

# PYTHON PACKAGES
- opencv +python3
- py-jupyterlab
- py-mpi4py
- py-notebook
- py-numba
- py-numpy
- py-openai
- py-pandas
@@ -208,8 +212,6 @@ spack:
- py-scikit-learn
- py-scipy
- py-seaborn
# - py-horovod # error
# - py-jax # error
# - py-matplotlib # error
@@ -217,31 +219,28 @@ spack:
# - py-torch # error

# GPU
- aml +ze
- amrex +sycl
- arborx +sycl ^kokkos +sycl +openmp cxxstd=17 +examples
- cabana +sycl ^kokkos +sycl +openmp cxxstd=17 +examples
- ginkgo +sycl
- heffte +sycl
- kokkos +sycl +openmp cxxstd=17 +examples
- kokkos-kernels build_type=Release %oneapi ^kokkos +sycl +openmp cxxstd=17 +examples
- petsc +sycl
- sundials +sycl cxxstd=17 +examples-install
- tau +mpi +opencl +level_zero ~pdt +syscall # requires libdrm.so to be installed
- upcxx +level_zero
# --
# - hpctoolkit +level_zero # dyninst@12.3.0%gcc: /usr/bin/ld: libiberty/./d-demangle.c:142: undefined reference to `_intel_fast_memcpy'; can't mix intel-tbb@%oneapi with dyninst%gcc
# - warpx compute=sycl # warpx: spack-build-wzp6vvo/_deps/fetchedamrex-src/Src/Base/AMReX_RandomEngine.H:18:10: fatal error: 'oneapi/mkl/rng/device.hpp' file not found
# --
- aml # aml: /opt/intel/oneapi/compiler/2024.0/bin/sycl-post-link: error while loading shared libraries: libonnxruntime.1.12.22.721.so: cannot open shared object file: No such file or directory
- aml +ze # aml: /opt/intel/oneapi/compiler/2024.0/bin/sycl-post-link: error while loading shared libraries: libonnxruntime.1.12.22.721.so: cannot open shared object file: No such file or directory
- arborx +sycl ^kokkos +sycl +openmp cxxstd=17 +examples # kokkos@4.2.00: CMake Error at cmake/Modules/FindTPLONEDPL.cmake:31 (FIND_PACKAGE):
- cabana +sycl ^kokkos +sycl +openmp cxxstd=17 +examples # kokkos@4.2.00: CMake Error at cmake/Modules/FindTPLONEDPL.cmake:31 (FIND_PACKAGE):
- ginkgo +sycl # ginkgo: Could NOT find PAPI (missing: PAPI_LIBRARY PAPI_INCLUDE_DIR sde) (Required is at least version "7.0.1.0") SYCL feature test compile failed! compile output is: CMake Error at /opt/intel/oneapi/compiler/2024.0/lib/cmake/IntelSYCL/IntelSYCLConfig.cmake:282 (SYCL_FEATURE_TEST_EXTRACT): SYCL_FEATURE_TEST_EXTRACT Function invoked with incorrect arguments for
- heffte +sycl # heffte: /opt/intel/oneapi/compiler/2024.0/bin/sycl-post-link: error while loading shared libraries: libonnxruntime.1.12.22.721.so: cannot open shared object file: No such file or directory
- kokkos +sycl +openmp cxxstd=17 +examples # kokkos@4.2.00: CMake Error at cmake/Modules/FindTPLONEDPL.cmake:31 (FIND_PACKAGE):
- kokkos-kernels build_type=Release %oneapi ^kokkos +sycl +openmp cxxstd=17 +examples # kokkos@4.0.00: tpls/desul/include/desul/atomics/Adapt_SYCL.hpp:83:7: error: no template named 'sycl_memory_scope'
- petsc +sycl # kokkos@4.0.00: tpls/desul/include/desul/atomics/Adapt_SYCL.hpp:83:7: error: no template named 'sycl_memory_scope'
# - slate +sycl # blaspp: CMake Error at CMakeLists.txt:313 (find_package): ... set MKL_FOUND to FALSE so package "MKL" is considered to be NOT FOUND.
- sundials +sycl cxxstd=17 +examples-install # sundials@6.6.2 /cakfnxs: CMake: could NOT find MPI_CXX (missing: MPI_CXX_WORKS) (Required is at least version "2.0.0")

- py-scipy

ci:
  pipeline-gen:
  - build-job:
      image: ghcr.io/spack/ubuntu22.04-runner-amd64-oneapi-2024.0.0:2024.01.16b
      image: ecpe4s/ubuntu22.04-runner-amd64-oneapi-2024.2:2024.06.21

cdash:
  build-group: E4S OneAPI

@@ -35,19 +35,21 @@ spack:
mpi:
  require: mpich
mpich:
  require: '~wrapperrpath ~hwloc'
  require: '~wrapperrpath ~hwloc %gcc@9.4.0 target=ppc64le'
ncurses:
  require: '@6.3 +termlib'
  require: '@6.3 +termlib %gcc@9.4.0 target=ppc64le'
faodel:
  require: "~tcmalloc"
  require: "~tcmalloc %gcc@9.4.0 target=ppc64le"
tbb:
  require: intel-tbb
libffi:
  require: "@3.4.4"
  require: "@3.4.4 %gcc@9.4.0 target=ppc64le"
vtk-m:
  require: "+examples"
  require: "+examples %gcc@9.4.0 target=ppc64le"
cuda:
  require: "@11.4.4"
  require: "@11.4.4 %gcc@9.4.0 target=ppc64le"
paraview:
  require: "+examples %gcc@9.4.0 target=ppc64le"

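Each require string above packs several constraints into one spec: @ pins a version, % pins the compiler, and target= pins the architecture. Reading the cuda entry as a worked example (the spec is verbatim from the hunk; the annotations are editorial):

cuda:
  require: "@11.4.4 %gcc@9.4.0 target=ppc64le"
  # "@11.4.4"        -> use exactly CUDA 11.4.4
  # "%gcc@9.4.0"     -> compile with gcc 9.4.0
  # "target=ppc64le" -> build for POWER (ppc64le) nodes
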
specs:
@@ -158,7 +160,6 @@ spack:
- upcxx
- wannier90
- xyce +mpi +shared +pymi +pymi_static_tpls
# INCLUDED IN ECP DAV CPU
- adios2
- ascent
@@ -175,6 +176,7 @@ spack:
# - visit # libext, libxkbfile, libxrender, libxt, silo (https://github.com/spack/spack/issues/39538), cairo
- vtk-m
- zfp
# - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 ~paraview +pnetcdf +sz +unifyfs +veloc ~visit +vtkm +zfp # +visit: libext, libxkbfile, libxrender, libxt, silo (https://github.com/spack/spack/issues/39538), cairo
# --
# - dealii # fltk: https://github.com/spack/spack/issues/38791
# - geopm # geopm: https://github.com/spack/spack/issues/38798
@@ -210,7 +212,7 @@ spack:
- flux-core +cuda
- hpctoolkit +cuda
- papi +cuda
- tau +mpi +cuda # tau: has issue with `spack env depfile` build
- tau +mpi +cuda
# --
# - legion +cuda # legion: needs NVIDIA driver

@@ -219,7 +221,6 @@ spack:
- arborx +cuda cuda_arch=70 ^kokkos +wrapper
- caliper +cuda cuda_arch=70
- chai +cuda cuda_arch=70 ^umpire ~shared
# - cp2k +mpi +cuda cuda_arch=70 # dbcsr
- ecp-data-vis-sdk ~rocm +adios2 ~ascent +hdf5 +vtkm +zfp ~paraview +cuda cuda_arch=70
- exago +mpi +python +raja +hiop ~rocm +cuda cuda_arch=70 ~ipopt ^hiop@1.0.0 ~sparse +mpi +raja ~rocm +cuda cuda_arch=70 #^raja@0.14.0
- flecsi +cuda cuda_arch=70
@@ -252,6 +253,7 @@ spack:
- zfp +cuda cuda_arch=70
# --
# - axom +cuda cuda_arch=70 # axom: https://github.com/spack/spack/issues/29520
# - cusz +cuda cuda_arch=70 # cusz: https://github.com/spack/spack/issues/38787
# - dealii +cuda cuda_arch=70 # fltk: https://github.com/spack/spack/issues/38791
# - lammps +cuda cuda_arch=70 # lammps: needs NVIDIA driver

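For reference, the spec syntax used throughout these lists: + and ~ turn a boolean variant on or off, name=value sets a multi-valued variant, and ^ constrains a dependency. Two specs from the CUDA 70 block above, annotated as a minimal sketch:

spack:
  specs:
  # '+cuda' enables the variant; 'cuda_arch=70' generates Volta (sm_70) code
  - caliper +cuda cuda_arch=70
  # '^umpire ~shared' forces the umpire dependency to build static libraries
  - chai +cuda cuda_arch=70 ^umpire ~shared
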
@@ -16,208 +16,210 @@ spack:
mpi:
  require: mpich
mpich:
  require: '~wrapperrpath ~hwloc'
  require: '~wrapperrpath ~hwloc %gcc target=x86_64_v3'
openblas:
  variants: threads=openmp
paraview:
  # Don't build GUI support or GLX rendering for HPC/container deployments
  require: "@5.11 ~qt ^[virtuals=gl] osmesa"
  require: "@5.11 +examples ~qt ^[virtuals=gl] osmesa %gcc target=x86_64_v3"

# ROCm 5.4.3
# ROCm
comgr:
  buildable: false
  externals:
  - spec: comgr@5.7.1
    prefix: /opt/rocm-5.7.1/
  - spec: comgr@6.1.1
    prefix: /opt/rocm-6.1.1/
hip-rocclr:
  buildable: false
  externals:
  - spec: hip-rocclr@5.7.1
    prefix: /opt/rocm-5.7.1/hip
  - spec: hip-rocclr@6.1.1
    prefix: /opt/rocm-6.1.1/hip
hipblas:
  buildable: false
  externals:
  - spec: hipblas@5.7.1
    prefix: /opt/rocm-5.7.1/
  - spec: hipblas@6.1.1
    prefix: /opt/rocm-6.1.1/
hipcub:
  buildable: false
  externals:
  - spec: hipcub@5.7.1
    prefix: /opt/rocm-5.7.1/
  - spec: hipcub@6.1.1
    prefix: /opt/rocm-6.1.1/
hipfft:
  buildable: false
  externals:
  - spec: hipfft@5.7.1
    prefix: /opt/rocm-5.7.1/
  - spec: hipfft@6.1.1
    prefix: /opt/rocm-6.1.1/
hipsparse:
  buildable: false
  externals:
  - spec: hipsparse@5.7.1
    prefix: /opt/rocm-5.7.1/
  - spec: hipsparse@6.1.1
    prefix: /opt/rocm-6.1.1/
miopen-hip:
  buildable: false
  externals:
  - spec: miopen-hip@5.7.1
    prefix: /opt/rocm-5.7.1/
  - spec: miopen-hip@6.1.1
    prefix: /opt/rocm-6.1.1/
miopengemm:
  buildable: false
  externals:
  - spec: miopengemm@5.7.1
    prefix: /opt/rocm-5.7.1/
  - spec: miopengemm@6.1.1
    prefix: /opt/rocm-6.1.1/
rccl:
  buildable: false
  externals:
  - spec: rccl@5.7.1
    prefix: /opt/rocm-5.7.1/
  - spec: rccl@6.1.1
    prefix: /opt/rocm-6.1.1/
rocblas:
  buildable: false
  externals:
  - spec: rocblas@5.7.1
    prefix: /opt/rocm-5.7.1/
  - spec: rocblas@6.1.1
    prefix: /opt/rocm-6.1.1/
rocfft:
  buildable: false
  externals:
  - spec: rocfft@5.7.1
    prefix: /opt/rocm-5.7.1/
  - spec: rocfft@6.1.1
    prefix: /opt/rocm-6.1.1/
rocm-clang-ocl:
  buildable: false
  externals:
  - spec: rocm-clang-ocl@5.7.1
    prefix: /opt/rocm-5.7.1/
  - spec: rocm-clang-ocl@6.1.1
    prefix: /opt/rocm-6.1.1/
rocm-cmake:
  buildable: false
  externals:
  - spec: rocm-cmake@5.7.1
    prefix: /opt/rocm-5.7.1/
  - spec: rocm-cmake@6.1.1
    prefix: /opt/rocm-6.1.1/
rocm-dbgapi:
  buildable: false
  externals:
  - spec: rocm-dbgapi@5.7.1
    prefix: /opt/rocm-5.7.1/
  - spec: rocm-dbgapi@6.1.1
    prefix: /opt/rocm-6.1.1/
rocm-debug-agent:
  buildable: false
  externals:
  - spec: rocm-debug-agent@5.7.1
    prefix: /opt/rocm-5.7.1/
  - spec: rocm-debug-agent@6.1.1
    prefix: /opt/rocm-6.1.1/
rocm-device-libs:
  buildable: false
  externals:
  - spec: rocm-device-libs@5.7.1
    prefix: /opt/rocm-5.7.1/
  - spec: rocm-device-libs@6.1.1
    prefix: /opt/rocm-6.1.1/
rocm-gdb:
  buildable: false
  externals:
  - spec: rocm-gdb@5.7.1
    prefix: /opt/rocm-5.7.1/
  - spec: rocm-gdb@6.1.1
    prefix: /opt/rocm-6.1.1/
rocm-opencl:
  buildable: false
  externals:
  - spec: rocm-opencl@5.7.1
    prefix: /opt/rocm-5.7.1/opencl
  - spec: rocm-opencl@6.1.1
    prefix: /opt/rocm-6.1.1/opencl
rocm-smi-lib:
  buildable: false
  externals:
  - spec: rocm-smi-lib@5.7.1
    prefix: /opt/rocm-5.7.1/
  - spec: rocm-smi-lib@6.1.1
    prefix: /opt/rocm-6.1.1/
hip:
  buildable: false
  externals:
  - spec: hip@5.7.1
    prefix: /opt/rocm-5.7.1
  - spec: hip@6.1.1
    prefix: /opt/rocm-6.1.1
    extra_attributes:
      compilers:
        c: /opt/rocm-5.7.1/llvm/bin/clang++
        c++: /opt/rocm-5.7.1/llvm/bin/clang++
        hip: /opt/rocm-5.7.1/hip/bin/hipcc
        c: /opt/rocm-6.1.1/llvm/bin/clang++
        c++: /opt/rocm-6.1.1/llvm/bin/clang++
        hip: /opt/rocm-6.1.1/hip/bin/hipcc
hipify-clang:
  buildable: false
  externals:
  - spec: hipify-clang@5.7.1
    prefix: /opt/rocm-5.7.1
  - spec: hipify-clang@6.1.1
    prefix: /opt/rocm-6.1.1
llvm-amdgpu:
  buildable: false
  externals:
  - spec: llvm-amdgpu@5.7.1
    prefix: /opt/rocm-5.7.1/llvm
  - spec: llvm-amdgpu@6.1.1
    prefix: /opt/rocm-6.1.1/llvm
    extra_attributes:
      compilers:
        c: /opt/rocm-5.7.1/llvm/bin/clang++
        cxx: /opt/rocm-5.7.1/llvm/bin/clang++
        c: /opt/rocm-6.1.1/llvm/bin/clang++
        cxx: /opt/rocm-6.1.1/llvm/bin/clang++
hsakmt-roct:
  buildable: false
  externals:
  - spec: hsakmt-roct@5.7.1
    prefix: /opt/rocm-5.7.1/
  - spec: hsakmt-roct@6.1.1
    prefix: /opt/rocm-6.1.1/
hsa-rocr-dev:
  buildable: false
  externals:
  - spec: hsa-rocr-dev@5.7.1
    prefix: /opt/rocm-5.7.1/
  - spec: hsa-rocr-dev@6.1.1
    prefix: /opt/rocm-6.1.1/
    extra_attributes:
      compilers:
        c: /opt/rocm-5.7.1/llvm/bin/clang++
        cxx: /opt/rocm-5.7.1/llvm/bin/clang++
        c: /opt/rocm-6.1.1/llvm/bin/clang++
        cxx: /opt/rocm-6.1.1/llvm/bin/clang++
roctracer-dev-api:
  buildable: false
  externals:
  - spec: roctracer-dev-api@5.7.1
    prefix: /opt/rocm-5.7.1
  - spec: roctracer-dev-api@6.1.1
    prefix: /opt/rocm-6.1.1
roctracer-dev:
  buildable: false
  externals:
  - spec: roctracer-dev@4.5.3
    prefix: /opt/rocm-5.7.1
    prefix: /opt/rocm-6.1.1
rocprim:
  buildable: false
  externals:
  - spec: rocprim@5.7.1
    prefix: /opt/rocm-5.7.1
  - spec: rocprim@6.1.1
    prefix: /opt/rocm-6.1.1
rocrand:
  buildable: false
  externals:
  - spec: rocrand@5.7.1
    prefix: /opt/rocm-5.7.1
  - spec: rocrand@6.1.1
    prefix: /opt/rocm-6.1.1
hipsolver:
  buildable: false
  externals:
  - spec: hipsolver@5.7.1
    prefix: /opt/rocm-5.7.1
  - spec: hipsolver@6.1.1
    prefix: /opt/rocm-6.1.1
rocsolver:
  buildable: false
  externals:
  - spec: rocsolver@5.7.1
    prefix: /opt/rocm-5.7.1
  - spec: rocsolver@6.1.1
    prefix: /opt/rocm-6.1.1
rocsparse:
  buildable: false
  externals:
  - spec: rocsparse@5.7.1
    prefix: /opt/rocm-5.7.1
  - spec: rocsparse@6.1.1
    prefix: /opt/rocm-6.1.1
rocthrust:
  buildable: false
  externals:
  - spec: rocthrust@5.7.1
    prefix: /opt/rocm-5.7.1
  - spec: rocthrust@6.1.1
    prefix: /opt/rocm-6.1.1
rocprofiler-dev:
  buildable: false
  externals:
  - spec: rocprofiler-dev@5.7.1
    prefix: /opt/rocm-5.7.1
  - spec: rocprofiler-dev@6.1.1
    prefix: /opt/rocm-6.1.1
rocm-core:
  buildable: false
  externals:
  - spec: rocm-core@6.1.1
    prefix: /opt/rocm-6.1.1

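Every ROCm component above follows the same external-package pattern: buildable: false forbids Spack from ever building the package from source, and each externals entry maps a versioned spec to a preinstalled prefix. A minimal sketch of the shape, assuming a hypothetical install under /opt/rocm-6.1.1:

packages:
  rocblas:
    # never build rocblas; always take it from the ROCm install below
    buildable: false
    externals:
    - spec: rocblas@6.1.1
      prefix: /opt/rocm-6.1.1/
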
specs:
# ROCM NOARCH
- hpctoolkit +rocm
- tau +mpi +rocm +syscall # tau: has issue with `spack env depfile` build
- tau +mpi +rocm +syscall

# ROCM 908
- adios2 +kokkos +rocm amdgpu_target=gfx908
- amrex +rocm amdgpu_target=gfx908
- arborx +rocm amdgpu_target=gfx908
- cabana +rocm amdgpu_target=gfx908
- caliper +rocm amdgpu_target=gfx908
- chai +rocm amdgpu_target=gfx908
# - cp2k +mpi +rocm amdgpu_target=gfx908 # cp2k: Error: KeyError: 'No spec with name rocm in... "-L{}".format(spec["rocm"].libs.directories[0]),
- ecp-data-vis-sdk +paraview +vtkm +rocm amdgpu_target=gfx908
- exago +mpi +python +raja +hiop +rocm amdgpu_target=gfx908 ~ipopt cxxflags="-Wno-error=non-pod-varargs" ^hiop@1.0.0 ~sparse +mpi +raja +rocm amdgpu_target=gfx908
- gasnet +rocm amdgpu_target=gfx908
- ginkgo +rocm amdgpu_target=gfx908
- heffte +rocm amdgpu_target=gfx908
@@ -227,12 +229,8 @@ spack:
- legion +rocm amdgpu_target=gfx908
- magma ~cuda +rocm amdgpu_target=gfx908
- mfem +rocm amdgpu_target=gfx908
- petsc +rocm amdgpu_target=gfx908
- raja ~openmp +rocm amdgpu_target=gfx908
# - slate +rocm amdgpu_target=gfx908 # slate: hip/device_gescale_row_col.hip.cc:58:49: error: use of overloaded operator '*' is ambiguous (with operand types 'HIP_vector_type<double, 2>' and 'const HIP_vector_type<double, 2>')
- slepc +rocm amdgpu_target=gfx908 ^petsc +rocm amdgpu_target=gfx908
- strumpack ~slate +rocm amdgpu_target=gfx908
- sundials +rocm amdgpu_target=gfx908
- superlu-dist +rocm amdgpu_target=gfx908
- tasmanian ~openmp +rocm amdgpu_target=gfx908
- trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack ~ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu ~stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long +rocm amdgpu_target=gfx908
@@ -244,22 +242,26 @@ spack:
# - hdf5-vol-cache
# - hdf5-vol-log
# - libcatalyst
- paraview +rocm amdgpu_target=gfx908
# - paraview +rocm amdgpu_target=gfx908 # mesa: https://github.com/spack/spack/issues/44745
# - vtk-m ~openmp +rocm amdgpu_target=gfx908 # vtk-m: https://github.com/spack/spack/issues/40268
# --
- adios2 +kokkos +rocm amdgpu_target=gfx908 # adios2: https://github.com/spack/spack/issues/44594
# - exago +mpi +python +raja +hiop +rocm amdgpu_target=gfx908 ~ipopt cxxflags="-Wno-error=non-pod-varargs" ^hiop@1.0.0 ~sparse +mpi +raja +rocm amdgpu_target=gfx908 # raja: https://github.com/spack/spack/issues/44593
# - lbann ~cuda +rocm amdgpu_target=gfx908 # aluminum: https://github.com/spack/spack/issues/38807
# - papi +rocm amdgpu_target=gfx908 # papi: https://github.com/spack/spack/issues/27898
# - petsc +rocm amdgpu_target=gfx908 # petsc: https://github.com/spack/spack/issues/44600
# - slepc +rocm amdgpu_target=gfx908 ^petsc +rocm amdgpu_target=gfx908 # petsc: https://github.com/spack/spack/issues/44600
# - sundials +rocm amdgpu_target=gfx908 # sundials: https://github.com/spack/spack/issues/44601

# ROCM 90a
- adios2 +kokkos +rocm amdgpu_target=gfx90a
- amrex +rocm amdgpu_target=gfx90a
- arborx +rocm amdgpu_target=gfx90a
- cabana +rocm amdgpu_target=gfx90a
- caliper +rocm amdgpu_target=gfx90a
- chai +rocm amdgpu_target=gfx90a
# - cp2k +mpi +rocm amdgpu_target=gfx90a # cp2k: Error: KeyError: 'No spec with name rocm in... "-L{}".format(spec["rocm"].libs.directories[0]),
- ecp-data-vis-sdk +paraview +vtkm +rocm amdgpu_target=gfx90a
- exago +mpi +python +raja +hiop +rocm amdgpu_target=gfx90a ~ipopt cxxflags="-Wno-error=non-pod-varargs" ^hiop@1.0.0 ~sparse +mpi +raja +rocm amdgpu_target=gfx90a
- gasnet +rocm amdgpu_target=gfx90a
- ginkgo +rocm amdgpu_target=gfx90a
- heffte +rocm amdgpu_target=gfx90a
@@ -269,12 +271,8 @@ spack:
- legion +rocm amdgpu_target=gfx90a
- magma ~cuda +rocm amdgpu_target=gfx90a
- mfem +rocm amdgpu_target=gfx90a
- petsc +rocm amdgpu_target=gfx90a
- raja ~openmp +rocm amdgpu_target=gfx90a
# - slate +rocm amdgpu_target=gfx90a # slate: hip/device_gescale_row_col.hip.cc:58:49: error: use of overloaded operator '*' is ambiguous (with operand types 'HIP_vector_type<double, 2>' and 'const HIP_vector_type<double, 2>')
- slepc +rocm amdgpu_target=gfx90a ^petsc +rocm amdgpu_target=gfx90a
- strumpack ~slate +rocm amdgpu_target=gfx90a
- sundials +rocm amdgpu_target=gfx90a
- superlu-dist +rocm amdgpu_target=gfx90a
- tasmanian ~openmp +rocm amdgpu_target=gfx90a
- trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack ~ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu ~stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long +rocm amdgpu_target=gfx90a
@@ -286,16 +284,23 @@ spack:
# - hdf5-vol-cache
# - hdf5-vol-log
# - libcatalyst
- paraview +rocm amdgpu_target=gfx90a
# - paraview +rocm amdgpu_target=gfx90a # mesa: https://github.com/spack/spack/issues/44745
# - vtk-m ~openmp +rocm amdgpu_target=gfx90a # vtk-m: https://github.com/spack/spack/issues/40268
# --
- adios2 +kokkos +rocm amdgpu_target=gfx90a # adios2: https://github.com/spack/spack/issues/44594
# - exago +mpi +python +raja +hiop +rocm amdgpu_target=gfx90a ~ipopt cxxflags="-Wno-error=non-pod-varargs" ^hiop@1.0.0 ~sparse +mpi +raja +rocm amdgpu_target=gfx90a # raja: https://github.com/spack/spack/issues/44593
# - lbann ~cuda +rocm amdgpu_target=gfx90a # aluminum: https://github.com/spack/spack/issues/38807
# - papi +rocm amdgpu_target=gfx90a # papi: https://github.com/spack/spack/issues/27898
# - petsc +rocm amdgpu_target=gfx90a # petsc: https://github.com/spack/spack/issues/44600
# - slepc +rocm amdgpu_target=gfx90a ^petsc +rocm amdgpu_target=gfx90a # petsc: https://github.com/spack/spack/issues/44600
# - sundials +rocm amdgpu_target=gfx90a # sundials: https://github.com/spack/spack/issues/44601

ci:
  pipeline-gen:
  - build-job:
      image: ecpe4s/ubuntu22.04-runner-amd64-gcc-11.4-rocm5.7.1:2024.03.01
      image: ecpe4s/ubuntu22.04-runner-amd64-gcc-11.4-rocm6.1.1:2024.06.23

cdash:
  build-group: E4S ROCm External

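The image bump is the other half of the ROCm 5.7.1 to 6.1.1 move: the runner image must ship the same ROCm tree that the externals prefixes point at, or every preregistered external resolves to a missing path. A condensed sketch of the two pieces that move in lockstep (both the spec and the image tag are taken from this diff; the skeleton is illustrative):

spack:
  packages:
    hip:
      buildable: false
      externals:
      - spec: hip@6.1.1
        prefix: /opt/rocm-6.1.1   # must exist inside the CI image below
  ci:
    pipeline-gen:
    - build-job:
        image: ecpe4s/ubuntu22.04-runner-amd64-gcc-11.4-rocm6.1.1:2024.06.23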