Compare commits: alby/libvt...develop-20 (305 commits)
| SHA1 |
|---|
| a864108bc1 |
| 4aed051b73 |
| 43996e84c3 |
| 14ebf1985e |
| 0c2b98ca70 |
| 43143b134a |
| 45697582dc |
| 9a214ffb75 |
| 4286c7398b |
| 131acbdacc |
| 86d2399c76 |
| d89d6dab6d |
| e6c94e9126 |
| af5b93bb97 |
| dc25da1931 |
| 067e40591a |
| 483688580e |
| 7448acaf98 |
| dfe2d5dca2 |
| b980fcff64 |
| b405559e7b |
| 7c5e3ddac5 |
| 6ffc11c46d |
| a45d129f86 |
| 3ccc527d74 |
| 51c75c6da3 |
| c3f4822f92 |
| ccae0ad534 |
| 95fffe16a8 |
| 640ccf2ef9 |
| 78f33bc002 |
| 25cc734452 |
| 71b17a6945 |
| 9425df1259 |
| e81076edd0 |
| 84043d97b7 |
| f93b61338c |
| 526ae84137 |
| d960d29485 |
| 5388ab1ac1 |
| 9ce075ed3d |
| 46419502cb |
| 50623f6bde |
| d8922233ce |
| 26a98f4c14 |
| b6b33cfe7a |
| 41582f76bd |
| 97972d300a |
| 7470d14b35 |
| ac825bd9d4 |
| e24bf70af4 |
| dfbdcaf551 |
| cc5ea14a6e |
| efd2ed750d |
| ab10b645c6 |
| e79f275bc9 |
| 09b4ed6c80 |
| 66f75407d1 |
| 5db241c755 |
| 1dcc67535a |
| 46fe1f48bc |
| 30201e3381 |
| 501bb88de2 |
| c5adb05433 |
| 8528106484 |
| 134dceb055 |
| aa3744299b |
| 105ac0c377 |
| 1949f67a71 |
| 0314071763 |
| 6e13d7d917 |
| 2d4758bdd9 |
| ff002316a8 |
| 251282812b |
| 4ac43b5032 |
| 05b6ac16bc |
| 8164712264 |
| ce0b9ea8cf |
| c560053c39 |
| 5b0ca6d287 |
| 887d356e01 |
| 95ca9dea89 |
| cb23362b7f |
| 42c4a8b388 |
| cc1f403385 |
| b1d281f197 |
| 29a1c418b3 |
| 36dd325187 |
| 585e150816 |
| 9a30ba1a4d |
| d5bb152165 |
| 0c6d0541f0 |
| 7b977dc103 |
| f98bfebce4 |
| 4907315079 |
| 48168de1cc |
| d99892e490 |
| 09d9b48957 |
| 62aa9d87ee |
| 0470fe545f |
| db8bf333d3 |
| f73c8f2255 |
| 42ed4d81b7 |
| e76b039997 |
| b49d098e3f |
| cd67b2a1a9 |
| a076548bd4 |
| 3d342ac69a |
| 88fc8ae591 |
| ff6ac42812 |
| c96f93b2a3 |
| cbe4a48291 |
| ebd41134fe |
| 77817a0f05 |
| 590d3ba6cf |
| 1e8988f11d |
| a889669cbc |
| fde33e66be |
| 6314ddacf2 |
| f935f36b21 |
| 082934f73f |
| 3e9e01e496 |
| 2abbfe719d |
| ace20c5d29 |
| f35fcee6be |
| 103370d84a |
| 6e47f1645f |
| 53eb6c46db |
| 00d769d644 |
| b6b34aa0fe |
| 83b9196e78 |
| ed76eab694 |
| 930b843885 |
| f53c68e005 |
| 12a22eebc7 |
| 69eb15936c |
| c69dea5465 |
| 3bd8c4df28 |
| 02dc697831 |
| 87cb39b860 |
| 468138bb4f |
| a8534b7345 |
| 8ba45b0b99 |
| 6ae358edd8 |
| 5ce45e0cee |
| b2901f1737 |
| 6b552dedbc |
| 6b3d2c535f |
| 2727bd92d8 |
| ebbfc0363b |
| 1b6e1fc852 |
| 1376ec4887 |
| 0eec7c5c53 |
| 05dd240997 |
| fb16c81b6c |
| 7c3b33416f |
| 6755b74d22 |
| d0e843ce03 |
| 37f6231f2a |
| d85f25a901 |
| 2041b92d3a |
| f461069888 |
| 9eb3de85c5 |
| 92d970498a |
| bd5e99120d |
| a7e307bd81 |
| 55152781cb |
| 8ce0c7771c |
| 7e0dfa270f |
| 7dc485d288 |
| 5c6c3b403b |
| 242854f266 |
| e9406a7d9e |
| 0ac1c52d17 |
| a3c42715db |
| 0f27188383 |
| 99f3b9f064 |
| d1bc4c4ef1 |
| 69a5c55702 |
| 2972d5847c |
| 1577eb9602 |
| 2f97c6ead2 |
| 1df4afb53f |
| 4991f0e484 |
| 09fd7d68eb |
| 2ace8a55c1 |
| 861acb9467 |
| eea743de46 |
| e2b6e5a7ec |
| 2f2dc3695c |
| 6eb5e57199 |
| 9a047eb95f |
| ef42fd7a2f |
| e642c2ea28 |
| f27d012e0c |
| c638311796 |
| 2a02bea405 |
| 219b42d991 |
| c290ec1f62 |
| e7ede86733 |
| e3e7609af4 |
| 49d7ebec36 |
| 7c3d82d819 |
| 1c0fbec9ce |
| ca4d60ae25 |
| dc571e20d6 |
| 1485275d0c |
| 1afbf72037 |
| 407fd80f95 |
| 62525d9076 |
| c2371263d1 |
| 5a870182ec |
| e33ad83256 |
| 0352a1df5d |
| ade44bce62 |
| ddb29ebc34 |
| 19a62630e5 |
| 5626802aa0 |
| f68063afbc |
| 8103d019d6 |
| ce89cdd9d7 |
| 20d9b356f0 |
| 3401438a3a |
| dcf1999d22 |
| 9e3c3ae298 |
| 40d6b84b4d |
| 2db09f27af |
| 6979d6a96f |
| deffd2acc9 |
| 988f71f434 |
| 4fe76f973a |
| 8e4e6ad529 |
| 3586a2dbe3 |
| 4648939043 |
| 746eaaf01a |
| bd2f78ae9a |
| a4ebe01dec |
| 94e9e18558 |
| d2e0ac4d1f |
| 36321fef1c |
| e879877878 |
| f0bce3eb25 |
| 316bfd8b7d |
| 92593fecd5 |
| 8db5fecdf5 |
| eee696f320 |
| 8689cf392f |
| 15d4cce2eb |
| 45fbb82d1a |
| 2861c89b89 |
| 135bfeeb27 |
| 8fa9c66a7d |
| 5e6174cbe2 |
| b4ad883b0d |
| a681111a23 |
| d2436afb66 |
| e43444cbb6 |
| 8c0d947114 |
| 5ba4a2b83a |
| da45073ef9 |
| 61e17fb36d |
| 9f13a90dd2 |
| ef4b35ea63 |
| 66187c8a6e |
| c8d95512fc |
| c74fa648b9 |
| 4cc5e9cac6 |
| 41345d18f9 |
| 0dd1316b68 |
| d8cc185e22 |
| 061051270c |
| 61445159db |
| 7fa3c7f0fa |
| 9c0fe30f42 |
| d00010819f |
| 248b05b32a |
| 8232e934e9 |
| 9d005839af |
| a7e5c73608 |
| 7896625919 |
| fb43cb8166 |
| 28f68e5d11 |
| 1199eeed0b |
| 8ffeb4900b |
| 456550da3f |
| b2676fe2dd |
| 8561ec6249 |
| 5b775d82ac |
| b43088cc16 |
| 237eab136a |
| ffffa2794b |
| 433b44403f |
| fa2e1c0653 |
| 00257f6824 |
| 3b8366f3d3 |
| a73f511404 |
| c823e01baf |
| 4188080899 |
| ef6ea2c93f |
| 3c672905d0 |
| ee106c747f |
| 295726e6b8 |
| 2654d64a3c |
| d91ec8500f |
| c354cc51d0 |
.github/workflows/audit.yaml (vendored): 2 changes

@@ -19,7 +19,7 @@ jobs:
   package-audits:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
     - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
       with:
         python-version: ${{inputs.python_version}}
.github/workflows/bootstrap.yml (vendored): 22 changes

@@ -24,7 +24,7 @@ jobs:
         make patch unzip which xz python3 python3-devel tree \
         cmake bison bison-devel libstdc++-static
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
       with:
         fetch-depth: 0
     - name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
         make patch unzip xz-utils python3 python3-dev tree \
         cmake bison
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
       with:
         fetch-depth: 0
     - name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
         bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
         make patch unzip xz-utils python3 python3-dev tree
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
       with:
         fetch-depth: 0
     - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
         make patch unzip which xz python3 python3-devel tree \
         cmake bison
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
       with:
         fetch-depth: 0
     - name: Setup repo
@@ -158,7 +158,7 @@ jobs:
       run: |
        brew install cmake bison@2.7 tree
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
     - name: Bootstrap clingo
       run: |
         source share/spack/setup-env.sh
@@ -179,7 +179,7 @@ jobs:
       run: |
        brew install tree
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
     - name: Bootstrap clingo
       run: |
         set -ex
@@ -204,7 +204,7 @@ jobs:
    runs-on: ubuntu-20.04
    steps:
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
       with:
         fetch-depth: 0
     - name: Setup repo
@@ -247,7 +247,7 @@ jobs:
         bzip2 curl file g++ gcc patchelf gfortran git gzip \
         make patch unzip xz-utils python3 python3-dev tree
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
       with:
         fetch-depth: 0
     - name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
         make patch unzip xz-utils python3 python3-dev tree \
         gawk
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
       with:
         fetch-depth: 0
     - name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
         # Remove GnuPG since we want to bootstrap it
         sudo rm -rf /usr/local/bin/gpg
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
     - name: Bootstrap GnuPG
       run: |
         source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
         # Remove GnuPG since we want to bootstrap it
         sudo rm -rf /usr/local/bin/gpg
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
     - name: Bootstrap GnuPG
       run: |
         source share/spack/setup-env.sh
.github/workflows/build-containers.yml (vendored): 12 changes

@@ -56,7 +56,7 @@ jobs:
    if: github.repository == 'spack/spack'
    steps:
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2

     - name: Set Container Tag Normal (Nightly)
       run: |
@@ -92,13 +92,13 @@ jobs:
         path: dockerfiles

     - name: Set up QEMU
-      uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1
+      uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # @v1

     - name: Set up Docker Buildx
-      uses: docker/setup-buildx-action@4b4e9c3e2d4531116a6f8ba8e71fc6e2cb6e6c8c # @v1
+      uses: docker/setup-buildx-action@16c0bc4a6e6ada2cfd8afd41d22d95379cf7c32a # @v1

     - name: Log in to GitHub Container Registry
-      uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
+      uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
       with:
         registry: ghcr.io
         username: ${{ github.actor }}
@@ -106,13 +106,13 @@ jobs:

     - name: Log in to DockerHub
       if: github.event_name != 'pull_request'
-      uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
+      uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
       with:
         username: ${{ secrets.DOCKERHUB_USERNAME }}
         password: ${{ secrets.DOCKERHUB_TOKEN }}

     - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-      uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671 # @v2
+      uses: docker/build-push-action@2eb1c1961a95fc15694676618e422e8ba1d63825 # @v2
       with:
         context: dockerfiles/${{ matrix.dockerfile[0] }}
         platforms: ${{ matrix.dockerfile[1] }}
.github/workflows/ci.yaml (vendored): 2 changes

@@ -35,7 +35,7 @@ jobs:
      core: ${{ steps.filter.outputs.core }}
      packages: ${{ steps.filter.outputs.packages }}
    steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
      if: ${{ github.event_name == 'push' }}
      with:
        fetch-depth: 0
.github/workflows/nightly-win-builds.yml (vendored): 2 changes

@@ -14,7 +14,7 @@ jobs:
  build-paraview-deps:
    runs-on: windows-latest
    steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
      with:
        fetch-depth: 0
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
.github/workflows/unit_tests.yaml (vendored): 10 changes

@@ -47,7 +47,7 @@ jobs:
        on_develop: false

    steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
@@ -94,7 +94,7 @@ jobs:
  shell:
    runs-on: ubuntu-latest
    steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
@@ -133,7 +133,7 @@ jobs:
        dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
@@ -152,7 +152,7 @@ jobs:
  clingo-cffi:
    runs-on: ubuntu-latest
    steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
@@ -186,7 +186,7 @@ jobs:
      matrix:
        python-version: ["3.10"]
    steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
.github/workflows/valid-style.yml (vendored): 7 changes

@@ -18,7 +18,7 @@ jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
      with:
        python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
  style:
    runs-on: ubuntu-latest
    steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
@@ -68,7 +68,7 @@ jobs:
        dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
@@ -81,6 +81,7 @@ jobs:
      shell: runuser -u spack-test -- bash {0}
      run: |
        source share/spack/setup-env.sh
        spack debug report
        spack -d bootstrap now --dev
        spack style -t black
        spack unit-test -V
.github/workflows/windows_python.yml (vendored): 6 changes

@@ -15,7 +15,7 @@ jobs:
  unit-tests:
    runs-on: windows-latest
    steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
      with:
        fetch-depth: 0
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
@@ -39,7 +39,7 @@ jobs:
  unit-tests-cmd:
    runs-on: windows-latest
    steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
      with:
        fetch-depth: 0
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
@@ -63,7 +63,7 @@ jobs:
  build-abseil:
    runs-on: windows-latest
    steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
      with:
        fetch-depth: 0
    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
@@ -214,7 +214,7 @@ goto :end_switch
 if defined _sp_args (
   if NOT "%_sp_args%"=="%_sp_args:--help=%" (
     goto :default_case
-  ) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
+  ) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
     goto :default_case
   ) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
     goto :default_case
bin/spack.ps1 (new file): 132 additions

@@ -0,0 +1,132 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.

# SPDX-License-Identifier: (Apache-2.0 OR MIT)
# #######################################################################

function Compare-CommonArgs {
    $CMDArgs = $args[0]
    # These aruments take precedence and call for no futher parsing of arguments
    # invoke actual Spack entrypoint with that context and exit after
    "--help", "-h", "--version", "-V" | ForEach-Object {
        $arg_opt = $_
        if(($CMDArgs) -and ([bool]($CMDArgs.Where({$_ -eq $arg_opt})))) {
            return $true
        }
    }
    return $false
}

function Read-SpackArgs {
    $SpackCMD_params = @()
    $SpackSubCommand = $NULL
    $SpackSubCommandArgs = @()
    $args_ = $args[0]
    $args_ | ForEach-Object {
        if (!$SpackSubCommand) {
            if($_.SubString(0,1) -eq "-")
            {
                $SpackCMD_params += $_
            }
            else{
                $SpackSubCommand = $_
            }
        }
        else{
            $SpackSubCommandArgs += $_
        }
    }
    return $SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs
}

function Invoke-SpackCD {
    if (Compare-CommonArgs $SpackSubCommandArgs) {
        python $Env:SPACK_ROOT/bin/spack cd -h
    }
    else {
        $LOC = $(python $Env:SPACK_ROOT/bin/spack location $SpackSubCommandArgs)
        if (($NULL -ne $LOC)){
            if ( Test-Path -Path $LOC){
                Set-Location $LOC
            }
            else{
                exit 1
            }
        }
        else {
            exit 1
        }
    }
}

function Invoke-SpackEnv {
    if (Compare-CommonArgs $SpackSubCommandArgs[0]) {
        python $Env:SPACK_ROOT/bin/spack env -h
    }
    else {
        $SubCommandSubCommand = $SpackSubCommandArgs[0]
        $SubCommandSubCommandArgs = $SpackSubCommandArgs[1..$SpackSubCommandArgs.Count]
        switch ($SubCommandSubCommand) {
            "activate" {
                if (Compare-CommonArgs $SubCommandSubCommandArgs) {
                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
                }
                elseif ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
                }
                elseif (!$SubCommandSubCommandArgs) {
                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
                }
                else {
                    $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
                    $ExecutionContext.InvokeCommand($SpackEnv)
                }
            }
            "deactivate" {
                if ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
                    python $Env:SPACK_ROOT/bin/spack env deactivate $SubCommandSubCommandArgs
                }
                elseif($SubCommandSubCommandArgs) {
                    python $Env:SPACK_ROOT/bin/spack env deactivate -h
                }
                else {
                    $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env deactivate --pwsh)
                    $ExecutionContext.InvokeCommand($SpackEnv)
                }
            }
            default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
        }
    }
}

function Invoke-SpackLoad {
    if (Compare-CommonArgs $SpackSubCommandArgs) {
        python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
    }
    elseif ([bool]($SpackSubCommandArgs.Where({($_ -eq "--pwsh") -or ($_ -eq "--list")}))) {
        python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
    }
    else {
        $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
        $ExecutionContext.InvokeCommand($SpackEnv)
    }
}


$SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs = Read-SpackArgs $args

if (Compare-CommonArgs $SpackCMD_params) {
    python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
    exit $LASTEXITCODE
}

# Process Spack commands with special conditions
# all other commands are piped directly to Spack
switch($SpackSubCommand)
{
    "cd"     {Invoke-SpackCD}
    "env"    {Invoke-SpackEnv}
    "load"   {Invoke-SpackLoad}
    "unload" {Invoke-SpackLoad}
    default  {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
}
@@ -916,9 +916,9 @@ function, as shown in the example below:
 .. code-block:: yaml

     projections:
-      zlib: {name}-{version}
-      ^mpi: {name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}
-      all: {name}-{version}/{compiler.name}-{compiler.version}
+      zlib: "{name}-{version}"
+      ^mpi: "{name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}"
+      all: "{name}-{version}/{compiler.name}-{compiler.version}"

 The entries in the projections configuration file must all be either
 specs or the keyword ``all``. For each spec, the projection used will
@@ -1132,11 +1132,11 @@ index once every package is pushed. Note how this target uses the generated
 example/push/%: example/install/%
 	@mkdir -p $(dir $@)
 	$(info About to push $(SPEC) to a buildcache)
-	$(SPACK) -e . buildcache create --allow-root --only=package --directory $(BUILDCACHE_DIR) /$(HASH)
+	$(SPACK) -e . buildcache push --allow-root --only=package $(BUILDCACHE_DIR) /$(HASH)
 	@touch $@

 push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
 	$(info Updating the buildcache index)
-	$(SPACK) -e . buildcache update-index --directory $(BUILDCACHE_DIR)
+	$(SPACK) -e . buildcache update-index $(BUILDCACHE_DIR)
 	$(info Done!)
 	@touch $@
@@ -3071,7 +3071,7 @@ follows:
         # The library provided by the bar virtual package
         @property
         def bar_libs(self):
-            return find_libraries("libFooBar", root=sef.home, recursive=True)
+            return find_libraries("libFooBar", root=self.home, recursive=True)

         # The baz virtual package home
         @property
@@ -1,8 +1,8 @@
 sphinx==6.2.1
 sphinxcontrib-programoutput==0.17
 sphinx_design==0.4.1
-sphinx-rtd-theme==1.2.1
-python-levenshtein==0.21.0
+sphinx-rtd-theme==1.2.2
+python-levenshtein==0.21.1
 docutils==0.18.1
 pygments==2.15.1
-urllib3==2.0.2
+urllib3==2.0.3
lib/spack/env/cc (vendored): 428 changes

@@ -416,30 +416,14 @@ input_command="$*"
 # The lists are all bell-separated to be as flexible as possible, as their
 # contents may come from the command line, from ' '-separated lists,
 # ':'-separated lists, etc.
-include_dirs_list=""
-lib_dirs_list=""
-rpath_dirs_list=""
-system_include_dirs_list=""
-system_lib_dirs_list=""
-system_rpath_dirs_list=""
-isystem_system_include_dirs_list=""
-isystem_include_dirs_list=""
-libs_list=""
-other_args_list=""
-
-# Global state for keeping track of -Wl,-rpath -Wl,/path
-wl_expect_rpath=no
-
-# Same, but for -Xlinker -rpath -Xlinker /path
-xlinker_expect_rpath=no
-
 parse_Wl() {
     while [ $# -ne 0 ]; do
         if [ "$wl_expect_rpath" = yes ]; then
             if system_dir "$1"; then
-                append system_rpath_dirs_list "$1"
+                append return_system_rpath_dirs_list "$1"
             else
-                append rpath_dirs_list "$1"
+                append return_rpath_dirs_list "$1"
             fi
             wl_expect_rpath=no
         else
@@ -449,9 +433,9 @@ parse_Wl() {
                 if [ -z "$arg" ]; then
                     shift; continue
                 elif system_dir "$arg"; then
-                    append system_rpath_dirs_list "$arg"
+                    append return_system_rpath_dirs_list "$arg"
                 else
-                    append rpath_dirs_list "$arg"
+                    append return_rpath_dirs_list "$arg"
                 fi
                 ;;
             --rpath=*)
@@ -459,9 +443,9 @@ parse_Wl() {
                 if [ -z "$arg" ]; then
                     shift; continue
                 elif system_dir "$arg"; then
-                    append system_rpath_dirs_list "$arg"
+                    append return_system_rpath_dirs_list "$arg"
                 else
-                    append rpath_dirs_list "$arg"
+                    append return_rpath_dirs_list "$arg"
                 fi
                 ;;
             -rpath|--rpath)
@@ -475,7 +459,7 @@ parse_Wl() {
                 return 1
                 ;;
             *)
-                append other_args_list "-Wl,$1"
+                append return_other_args_list "-Wl,$1"
                 ;;
         esac
         fi
@@ -483,177 +467,210 @@ parse_Wl() {
    done
}

categorize_arguments() {

while [ $# -ne 0 ]; do
    unset IFS

    # an RPATH to be added after the case statement.
    rp=""
    return_other_args_list=""
    return_isystem_was_used=""
    return_isystem_system_include_dirs_list=""
    return_isystem_include_dirs_list=""
    return_system_include_dirs_list=""
    return_include_dirs_list=""
    return_system_lib_dirs_list=""
    return_lib_dirs_list=""
    return_system_rpath_dirs_list=""
    return_rpath_dirs_list=""

    # Multiple consecutive spaces in the command line can
    # result in blank arguments
    if [ -z "$1" ]; then
        shift
        continue
    fi
    # Global state for keeping track of -Wl,-rpath -Wl,/path
    wl_expect_rpath=no

    if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
        # NOTE: the eval is required to allow `|` alternatives inside the variable
        eval "\
        case \"\$1\" in
            $SPACK_COMPILER_FLAGS_KEEP)
                append other_args_list \"\$1\"
    # Same, but for -Xlinker -rpath -Xlinker /path
    xlinker_expect_rpath=no

    while [ $# -ne 0 ]; do

        # an RPATH to be added after the case statement.
        rp=""

        # Multiple consecutive spaces in the command line can
        # result in blank arguments
        if [ -z "$1" ]; then
            shift
            continue
        fi

        if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
            # NOTE: the eval is required to allow `|` alternatives inside the variable
            eval "\
            case \"\$1\" in
                $SPACK_COMPILER_FLAGS_KEEP)
                    append return_other_args_list \"\$1\"
                    shift
                    continue
                    ;;
            esac
            "
        fi
        # the replace list is a space-separated list of pipe-separated pairs,
        # the first in each pair is the original prefix to be matched, the
        # second is the replacement prefix
        if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
            for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
                before=${rep%|*}
                after=${rep#*|}
                eval "\
                stripped=\"\${1##$before}\"
                "
                if [ "$stripped" = "$1" ] ; then
                    continue
                fi

                replaced="$after$stripped"

                # it matched, remove it
                shift
                continue

                if [ -z "$replaced" ] ; then
                    # completely removed, continue OUTER loop
                    continue 2
                fi

                # re-build argument list with replacement
                set -- "$replaced" "$@"
            done
        fi

        case "$1" in
            -isystem*)
                arg="${1#-isystem}"
                return_isystem_was_used=true
                if [ -z "$arg" ]; then shift; arg="$1"; fi
                if system_dir "$arg"; then
                    append return_isystem_system_include_dirs_list "$arg"
                else
                    append return_isystem_include_dirs_list "$arg"
                fi
                ;;
            -I*)
                arg="${1#-I}"
                if [ -z "$arg" ]; then shift; arg="$1"; fi
                if system_dir "$arg"; then
                    append return_system_include_dirs_list "$arg"
                else
                    append return_include_dirs_list "$arg"
                fi
                ;;
            -L*)
                arg="${1#-L}"
                if [ -z "$arg" ]; then shift; arg="$1"; fi
                if system_dir "$arg"; then
                    append return_system_lib_dirs_list "$arg"
                else
                    append return_lib_dirs_list "$arg"
                fi
                ;;
            -l*)
                # -loopopt=0 is generated erroneously in autoconf <= 2.69,
                # and passed by ifx to the linker, which confuses it with a
                # library. Filter it out.
                # TODO: generalize filtering of args with an env var, so that
                # TODO: we do not have to special case this here.
                if { [ "$mode" = "ccld" ] || [ $mode = "ld" ]; } \
                    && [ "$1" != "${1#-loopopt}" ]; then
                    shift
                    continue
                fi
                arg="${1#-l}"
                if [ -z "$arg" ]; then shift; arg="$1"; fi
                append return_other_args_list "-l$arg"
                ;;
            -Wl,*)
                IFS=,
                if ! parse_Wl ${1#-Wl,}; then
                    append return_other_args_list "$1"
                fi
                unset IFS
                ;;
            -Xlinker)
                shift
                if [ $# -eq 0 ]; then
                    # -Xlinker without value: let the compiler error about it.
                    append return_other_args_list -Xlinker
                    xlinker_expect_rpath=no
                    break
                elif [ "$xlinker_expect_rpath" = yes ]; then
                    # Register the path of -Xlinker -rpath <other args> -Xlinker <path>
                    if system_dir "$1"; then
                        append return_system_rpath_dirs_list "$1"
                    else
                        append return_rpath_dirs_list "$1"
                    fi
                    xlinker_expect_rpath=no
                else
                    case "$1" in
                        -rpath=*)
                            arg="${1#-rpath=}"
                            if system_dir "$arg"; then
                                append return_system_rpath_dirs_list "$arg"
                            else
                                append return_rpath_dirs_list "$arg"
                            fi
                            ;;
                        --rpath=*)
                            arg="${1#--rpath=}"
                            if system_dir "$arg"; then
                                append return_system_rpath_dirs_list "$arg"
                            else
                                append return_rpath_dirs_list "$arg"
                            fi
                            ;;
                        -rpath|--rpath)
                            xlinker_expect_rpath=yes
                            ;;
                        "$dtags_to_strip")
                            ;;
                        *)
                            append return_other_args_list -Xlinker
                            append return_other_args_list "$1"
                            ;;
                    esac
                fi
                ;;
            "$dtags_to_strip")
                ;;
            *)
                append return_other_args_list "$1"
                ;;
        esac
        "
    fi
    # the replace list is a space-separated list of pipe-separated pairs,
    # the first in each pair is the original prefix to be matched, the
    # second is the replacement prefix
    if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
        for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
            before=${rep%|*}
            after=${rep#*|}
            eval "\
            stripped=\"\${1##$before}\"
            "
            if [ "$stripped" = "$1" ] ; then
                continue
            fi
            shift
        done

        replaced="$after$stripped"

        # it matched, remove it
        shift

        if [ -z "$replaced" ] ; then
            # completely removed, continue OUTER loop
            continue 2
        fi

        # re-build argument list with replacement
        set -- "$replaced" "$@"
    done
    # We found `-Xlinker -rpath` but no matching value `-Xlinker /path`. Just append
    # `-Xlinker -rpath` again and let the compiler or linker handle the error during arg
    # parsing.
    if [ "$xlinker_expect_rpath" = yes ]; then
        append return_other_args_list -Xlinker
        append return_other_args_list -rpath
    fi

    case "$1" in
        -isystem*)
            arg="${1#-isystem}"
            isystem_was_used=true
            if [ -z "$arg" ]; then shift; arg="$1"; fi
            if system_dir "$arg"; then
                append isystem_system_include_dirs_list "$arg"
            else
                append isystem_include_dirs_list "$arg"
            fi
            ;;
        -I*)
            arg="${1#-I}"
            if [ -z "$arg" ]; then shift; arg="$1"; fi
            if system_dir "$arg"; then
                append system_include_dirs_list "$arg"
            else
                append include_dirs_list "$arg"
            fi
            ;;
        -L*)
            arg="${1#-L}"
            if [ -z "$arg" ]; then shift; arg="$1"; fi
            if system_dir "$arg"; then
                append system_lib_dirs_list "$arg"
            else
                append lib_dirs_list "$arg"
            fi
            ;;
        -l*)
            # -loopopt=0 is generated erroneously in autoconf <= 2.69,
            # and passed by ifx to the linker, which confuses it with a
            # library. Filter it out.
            # TODO: generalize filtering of args with an env var, so that
            # TODO: we do not have to special case this here.
            if { [ "$mode" = "ccld" ] || [ $mode = "ld" ]; } \
                && [ "$1" != "${1#-loopopt}" ]; then
                shift
                continue
            fi
            arg="${1#-l}"
            if [ -z "$arg" ]; then shift; arg="$1"; fi
            append other_args_list "-l$arg"
            ;;
        -Wl,*)
            IFS=,
            if ! parse_Wl ${1#-Wl,}; then
                append other_args_list "$1"
            fi
            unset IFS
            ;;
        -Xlinker)
            shift
            if [ $# -eq 0 ]; then
                # -Xlinker without value: let the compiler error about it.
                append other_args_list -Xlinker
                xlinker_expect_rpath=no
                break
            elif [ "$xlinker_expect_rpath" = yes ]; then
                # Register the path of -Xlinker -rpath <other args> -Xlinker <path>
                if system_dir "$1"; then
                    append system_rpath_dirs_list "$1"
                else
                    append rpath_dirs_list "$1"
                fi
                xlinker_expect_rpath=no
            else
                case "$1" in
                    -rpath=*)
                        arg="${1#-rpath=}"
                        if system_dir "$arg"; then
                            append system_rpath_dirs_list "$arg"
                        else
                            append rpath_dirs_list "$arg"
                        fi
                        ;;
                    --rpath=*)
                        arg="${1#--rpath=}"
                        if system_dir "$arg"; then
                            append system_rpath_dirs_list "$arg"
                        else
                            append rpath_dirs_list "$arg"
                        fi
                        ;;
                    -rpath|--rpath)
                        xlinker_expect_rpath=yes
                        ;;
                    "$dtags_to_strip")
                        ;;
                    *)
                        append other_args_list -Xlinker
                        append other_args_list "$1"
                        ;;
                esac
            fi
            ;;
        "$dtags_to_strip")
            ;;
        *)
            append other_args_list "$1"
            ;;
    esac
    shift
done
    # Same, but for -Wl flags.
    if [ "$wl_expect_rpath" = yes ]; then
        append return_other_args_list -Wl,-rpath
    fi
}

# We found `-Xlinker -rpath` but no matching value `-Xlinker /path`. Just append
# `-Xlinker -rpath` again and let the compiler or linker handle the error during arg
# parsing.
if [ "$xlinker_expect_rpath" = yes ]; then
    append other_args_list -Xlinker
    append other_args_list -rpath
fi

# Same, but for -Wl flags.
if [ "$wl_expect_rpath" = yes ]; then
    append other_args_list -Wl,-rpath
fi
categorize_arguments "$@"
include_dirs_list="$return_include_dirs_list"
lib_dirs_list="$return_lib_dirs_list"
rpath_dirs_list="$return_rpath_dirs_list"
system_include_dirs_list="$return_system_include_dirs_list"
system_lib_dirs_list="$return_system_lib_dirs_list"
system_rpath_dirs_list="$return_system_rpath_dirs_list"
isystem_was_used="$return_isystem_was_used"
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
isystem_include_dirs_list="$return_isystem_include_dirs_list"
other_args_list="$return_other_args_list"

#
# Add flags from Spack's cppflags, cflags, cxxflags, fcflags, fflags, and
@@ -673,12 +690,14 @@ elif [ "$SPACK_ADD_DEBUG_FLAGS" = "custom" ]; then
     extend flags_list SPACK_DEBUG_FLAGS
 fi

+spack_flags_list=""
+
 # Fortran flags come before CPPFLAGS
 case "$mode" in
     cc|ccld)
         case $lang_flags in
             F)
-                extend flags_list SPACK_FFLAGS
+                extend spack_flags_list SPACK_FFLAGS
                 ;;
         esac
         ;;
@@ -687,7 +706,7 @@ esac
 # C preprocessor flags come before any C/CXX flags
 case "$mode" in
     cpp|as|cc|ccld)
-        extend flags_list SPACK_CPPFLAGS
+        extend spack_flags_list SPACK_CPPFLAGS
         ;;
 esac

@@ -697,10 +716,10 @@ case "$mode" in
     cc|ccld)
         case $lang_flags in
             C)
-                extend flags_list SPACK_CFLAGS
+                extend spack_flags_list SPACK_CFLAGS
                 ;;
             CXX)
-                extend flags_list SPACK_CXXFLAGS
+                extend spack_flags_list SPACK_CXXFLAGS
                 ;;
         esac

@@ -712,10 +731,25 @@ esac
 # Linker flags
 case "$mode" in
     ld|ccld)
-        extend flags_list SPACK_LDFLAGS
+        extend spack_flags_list SPACK_LDFLAGS
         ;;
 esac

+IFS="$lsep"
+    categorize_arguments $spack_flags_list
+unset IFS
+spack_flags_include_dirs_list="$return_include_dirs_list"
+spack_flags_lib_dirs_list="$return_lib_dirs_list"
+spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
+spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
+spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
+spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
+spack_flags_isystem_was_used="$return_isystem_was_used"
+spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
+spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
+spack_flags_other_args_list="$return_other_args_list"
+

 # On macOS insert headerpad_max_install_names linker flag
 if [ "$mode" = ld ] || [ "$mode" = ccld ]; then
     if [ "${SPACK_SHORT_SPEC#*darwin}" != "${SPACK_SHORT_SPEC}" ]; then
@@ -741,6 +775,8 @@ if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
     extend lib_dirs_list SPACK_LINK_DIRS
 fi

+libs_list=""
+
 # add RPATHs if we're in in any linking mode
 case "$mode" in
     ld|ccld)
@@ -769,12 +805,16 @@ args_list="$flags_list"

 # Insert include directories just prior to any system include directories
 # NOTE: adding ${lsep} to the prefix here turns every added element into two
+extend args_list spack_flags_include_dirs_list "-I"
 extend args_list include_dirs_list "-I"
+extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
 extend args_list isystem_include_dirs_list "-isystem${lsep}"

 case "$mode" in
     cpp|cc|as|ccld)
-        if [ "$isystem_was_used" = "true" ]; then
+        if [ "$spack_flags_isystem_was_used" = "true" ]; then
+            extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
+        elif [ "$isystem_was_used" = "true" ]; then
             extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
         else
             extend args_list SPACK_INCLUDE_DIRS "-I"
@@ -782,11 +822,15 @@ case "$mode" in
         ;;
 esac

+extend args_list spack_flags_system_include_dirs_list -I
 extend args_list system_include_dirs_list -I
+extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
 extend args_list isystem_system_include_dirs_list "-isystem${lsep}"

 # Library search paths
+extend args_list spack_flags_lib_dirs_list "-L"
 extend args_list lib_dirs_list "-L"
+extend args_list spack_flags_system_lib_dirs_list "-L"
 extend args_list system_lib_dirs_list "-L"

 # RPATHs arguments
@@ -795,20 +839,25 @@ case "$mode" in
         if [ -n "$dtags_to_add" ] ; then
             append args_list "$linker_arg$dtags_to_add"
         fi
+        extend args_list spack_flags_rpath_dirs_list "$rpath"
         extend args_list rpath_dirs_list "$rpath"
+        extend args_list spack_flags_system_rpath_dirs_list "$rpath"
         extend args_list system_rpath_dirs_list "$rpath"
         ;;
     ld)
         if [ -n "$dtags_to_add" ] ; then
             append args_list "$dtags_to_add"
         fi
+        extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
         extend args_list rpath_dirs_list "-rpath${lsep}"
+        extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
         extend args_list system_rpath_dirs_list "-rpath${lsep}"
         ;;
 esac

 # Other arguments from the input command
 extend args_list other_args_list
+extend args_list spack_flags_other_args_list

 # Inject SPACK_LDLIBS, if supplied
 extend args_list libs_list "-l"
@@ -864,3 +913,4 @@ fi
 # Execute the full command, preserving spaces with IFS set
 # to the alarm bell separator.
 IFS="$lsep"; exec $full_command_list
@@ -760,13 +760,12 @@ def hashes_to_prefixes(spec):
     }


-def get_buildinfo_dict(spec, rel=False):
+def get_buildinfo_dict(spec):
     """Create metadata for a tarball"""
     manifest = get_buildfile_manifest(spec)

     return {
         "sbang_install_path": spack.hooks.sbang.sbang_install_path(),
-        "relative_rpaths": rel,
         "buildpath": spack.store.layout.root,
         "spackprefix": spack.paths.prefix,
         "relative_prefix": os.path.relpath(spec.prefix, spack.store.layout.root),
@@ -1209,9 +1208,6 @@ class PushOptions(NamedTuple):
     #: Overwrite existing tarball/metadata files in buildcache
     force: bool = False

-    #: Whether to use relative RPATHs
-    relative: bool = False
-
     #: Allow absolute paths to package prefixes when creating a tarball
     allow_root: bool = False

@@ -1281,41 +1277,17 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
         raise NoOverwriteException(url_util.format(remote_specfile_path))

     pkg_dir = os.path.basename(spec.prefix.rstrip(os.path.sep))
-    workdir = os.path.join(stage_dir, pkg_dir)
-
-    # TODO: We generally don't want to mutate any files, but when using relative
-    # mode, Spack unfortunately *does* mutate rpaths and links ahead of time.
-    # For now, we only make a full copy of the spec prefix when in relative mode.
-
-    if options.relative:
-        # tarfile is used because it preserves hardlink etc best.
-        binaries_dir = workdir
-        temp_tarfile_name = tarball_name(spec, ".tar")
-        temp_tarfile_path = os.path.join(tarfile_dir, temp_tarfile_name)
-        with closing(tarfile.open(temp_tarfile_path, "w")) as tar:
-            tar.add(name="%s" % spec.prefix, arcname=".")
-        with closing(tarfile.open(temp_tarfile_path, "r")) as tar:
-            tar.extractall(workdir)
-        os.remove(temp_tarfile_path)
-    else:
-        binaries_dir = spec.prefix
+    binaries_dir = spec.prefix

     # create info for later relocation and create tar
-    buildinfo = get_buildinfo_dict(spec, options.relative)
+    buildinfo = get_buildinfo_dict(spec)

-    # optionally make the paths in the binaries relative to each other
-    # in the spack install tree before creating tarball
-    if options.relative:
-        make_package_relative(workdir, spec, buildinfo, options.allow_root)
-    elif not options.allow_root:
+    if not options.allow_root:
         ensure_package_relocatable(buildinfo, binaries_dir)

     _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo)

-    # remove copy of install directory
-    if options.relative:
-        shutil.rmtree(workdir)
-
     # get the sha256 checksum of the tarball
     checksum = checksum_tarball(tarfile_path)
@@ -1336,7 +1308,6 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
     # This will be used to determine is the directory layout has changed.
     buildinfo = {}
     buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.layout.root)
-    buildinfo["relative_rpaths"] = options.relative
     spec_dict["buildinfo"] = buildinfo

     with open(specfile_path, "w") as outfile:
@@ -1596,35 +1567,6 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
     return None


-def make_package_relative(workdir, spec, buildinfo, allow_root):
-    """
-    Change paths in binaries to relative paths. Change absolute symlinks
-    to relative symlinks.
-    """
-    prefix = spec.prefix
-    old_layout_root = buildinfo["buildpath"]
-    orig_path_names = list()
-    cur_path_names = list()
-    for filename in buildinfo["relocate_binaries"]:
-        orig_path_names.append(os.path.join(prefix, filename))
-        cur_path_names.append(os.path.join(workdir, filename))
-
-    platform = spack.platforms.by_name(spec.platform)
-    if "macho" in platform.binary_formats:
-        relocate.make_macho_binaries_relative(cur_path_names, orig_path_names, old_layout_root)
-
-    if "elf" in platform.binary_formats:
-        relocate.make_elf_binaries_relative(cur_path_names, orig_path_names, old_layout_root)
-
-    allow_root or relocate.ensure_binaries_are_relocatable(cur_path_names)
-    orig_path_names = list()
-    cur_path_names = list()
-    for linkname in buildinfo.get("relocate_links", []):
-        orig_path_names.append(os.path.join(prefix, linkname))
-        cur_path_names.append(os.path.join(workdir, linkname))
-    relocate.make_link_relative(cur_path_names, orig_path_names)
-
-
 def ensure_package_relocatable(buildinfo, binaries_dir):
     """Check if package binaries are relocatable."""
     binaries = [os.path.join(binaries_dir, f) for f in buildinfo["relocate_binaries"]]
@@ -175,12 +175,12 @@ def black_root_spec() -> str:

 def flake8_root_spec() -> str:
     """Return the root spec used to bootstrap flake8"""
-    return _root_spec("py-flake8")
+    return _root_spec("py-flake8@3.8.2:")


 def pytest_root_spec() -> str:
     """Return the root spec used to bootstrap flake8"""
-    return _root_spec("py-pytest")
+    return _root_spec("py-pytest@6.2.4:")


 def ensure_environment_dependencies() -> None:
@@ -289,6 +289,7 @@ def std_initconfig_entries(self):
             "# CMake executable path: {0}".format(self.pkg.spec["cmake"].command.path),
             "#------------------{0}\n".format("-" * 60),
             cmake_cache_path("CMAKE_PREFIX_PATH", cmake_prefix_path),
+            self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
         ]

     def initconfig_package_entries(self):
@@ -102,11 +102,10 @@ def cuda_flags(arch_list):

     depends_on("cuda@11.0:", when="cuda_arch=80")
     depends_on("cuda@11.1:", when="cuda_arch=86")

     depends_on("cuda@11.4:", when="cuda_arch=87")

     depends_on("cuda@11.8:", when="cuda_arch=89")
-    depends_on("cuda@11.8:", when="cuda_arch=90")
+    depends_on("cuda@12.0:", when="cuda_arch=90")

     # From the NVIDIA install guide we know of conflicts for particular
     # platforms (linux, darwin), architectures (x86, powerpc) and compilers
@@ -121,7 +121,7 @@ def setup_run_environment(self, env):
             $ source {prefix}/{component}/{version}/env/vars.sh
         """
         # Only if environment modifications are desired (default is +envmods)
-        if "+envmods" in self.spec:
+        if "~envmods" not in self.spec:
             env.extend(
                 EnvironmentModifications.from_sourcing_file(
                     join_path(self.component_prefix, "env", "vars.sh")
@@ -23,6 +23,7 @@
 import spack.store
 from spack.directives import build_system, depends_on, extends, maintainers
 from spack.error import NoHeadersError, NoLibrariesError, SpecError
+from spack.install_test import test_part
 from spack.version import Version

 from ._checks import BaseBuilder, execute_install_time_tests
@@ -167,18 +168,65 @@ def remove_files_from_view(self, view, merge_map):

         view.remove_files(to_remove)

-    def test(self):
+    def test_imports(self):
         """Attempts to import modules of the installed package."""

         # Make sure we are importing the installed modules,
         # not the ones in the source directory
+        python = inspect.getmodule(self).python.path
         for module in self.import_modules:
-            self.run_test(
-                inspect.getmodule(self).python.path,
-                ["-c", "import {0}".format(module)],
-                purpose="checking import of {0}".format(module),
+            with test_part(
+                self,
+                f"test_imports_{module}",
+                purpose=f"checking import of {module}",
                 work_dir="spack-test",
-            )
+            ):
+                python("-c", f"import {module}")
+
+    def update_external_dependencies(self, extendee_spec=None):
+        """
+        Ensure all external python packages have a python dependency
+
+        If another package in the DAG depends on python, we use that
+        python for the dependency of the external. If not, we assume
+        that the external PythonPackage is installed into the same
+        directory as the python it depends on.
+        """
+        # TODO: Include this in the solve, rather than instantiating post-concretization
+        if "python" not in self.spec:
+            if extendee_spec:
+                python = extendee_spec
+            elif "python" in self.spec.root:
+                python = self.spec.root["python"]
+            else:
+                python = self.get_external_python_for_prefix()
+                if not python.concrete:
+                    repo = spack.repo.path.repo_for_pkg(python)
+                    python.namespace = repo.namespace
+
+                    # Ensure architecture information is present
+                    if not python.architecture:
+                        host_platform = spack.platforms.host()
+                        host_os = host_platform.operating_system("default_os")
+                        host_target = host_platform.target("default_target")
+                        python.architecture = spack.spec.ArchSpec(
+                            (str(host_platform), str(host_os), str(host_target))
+                        )
+                    else:
+                        if not python.architecture.platform:
+                            python.architecture.platform = spack.platforms.host()
+                        if not python.architecture.os:
+                            python.architecture.os = "default_os"
+                        if not python.architecture.target:
+                            python.architecture.target = archspec.cpu.host().family.name
+
+                    # Ensure compiler information is present
+                    if not python.compiler:
+                        python.compiler = self.spec.compiler
+
+                    python.external_path = self.spec.external_path
+                    python._mark_concrete()
+            self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"), virtuals=())


 class PythonPackage(PythonExtension):
@@ -225,51 +273,6 @@ def list_url(cls):
         name = cls.pypi.split("/")[0]
         return "https://pypi.org/simple/" + name + "/"

-    def update_external_dependencies(self, extendee_spec=None):
-        """
-        Ensure all external python packages have a python dependency
-
-        If another package in the DAG depends on python, we use that
-        python for the dependency of the external. If not, we assume
-        that the external PythonPackage is installed into the same
-        directory as the python it depends on.
-        """
-        # TODO: Include this in the solve, rather than instantiating post-concretization
-        if "python" not in self.spec:
-            if extendee_spec:
-                python = extendee_spec
-            elif "python" in self.spec.root:
-                python = self.spec.root["python"]
-            else:
-                python = self.get_external_python_for_prefix()
-                if not python.concrete:
-                    repo = spack.repo.path.repo_for_pkg(python)
-                    python.namespace = repo.namespace
-
-                    # Ensure architecture information is present
-                    if not python.architecture:
-                        host_platform = spack.platforms.host()
-                        host_os = host_platform.operating_system("default_os")
-                        host_target = host_platform.target("default_target")
-                        python.architecture = spack.spec.ArchSpec(
-                            (str(host_platform), str(host_os), str(host_target))
-                        )
-                    else:
-                        if not python.architecture.platform:
-                            python.architecture.platform = spack.platforms.host()
-                        if not python.architecture.os:
-                            python.architecture.os = "default_os"
-                        if not python.architecture.target:
-                            python.architecture.target = archspec.cpu.host().family.name
-
-                    # Ensure compiler information is present
-                    if not python.compiler:
-                        python.compiler = self.spec.compiler
-
-                    python.external_path = self.spec.external_path
-                    python._mark_concrete()
-            self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"))
-
     def get_external_python_for_prefix(self):
         """
         For an external package that extends python, find the most likely spec for the python
@@ -28,7 +28,6 @@

import spack
import spack.binary_distribution as bindist
import spack.compilers as compilers
import spack.config as cfg
import spack.environment as ev
import spack.main
@@ -70,17 +69,10 @@ def __exit__(self, exc_type, exc_value, exc_traceback):
        return False


def _is_main_phase(phase_name):
    return True if phase_name == "specs" else False


def get_job_name(phase, strip_compiler, spec, osarch, build_group):
def get_job_name(spec, osarch, build_group):
    """Given the necessary parts, format the gitlab job name

    Arguments:
        phase (str): Either 'specs' for the main phase, or the name of a
            bootstrapping phase
        strip_compiler (bool): Should compiler be stripped from job name
        spec (spack.spec.Spec): Spec job will build
        osarch: Architecture TODO: (this is a spack.spec.ArchSpec,
            but sphinx doesn't recognize the type and fails).
@@ -93,12 +85,7 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
    format_str = ""
    format_args = []

    if phase:
        format_str += "({{{0}}})".format(item_idx)
        format_args.append(phase)
        item_idx += 1

    format_str += " {{{0}}}".format(item_idx)
    format_str += "{{{0}}}".format(item_idx)
    format_args.append(spec.name)
    item_idx += 1

@@ -110,10 +97,9 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
    format_args.append(spec.version)
    item_idx += 1

    if _is_main_phase(phase) is True or strip_compiler is False:
        format_str += " {{{0}}}".format(item_idx)
        format_args.append(spec.compiler)
        item_idx += 1
    format_str += " {{{0}}}".format(item_idx)
    format_args.append(spec.compiler)
    item_idx += 1

    format_str += " {{{0}}}".format(item_idx)
    format_args.append(osarch)
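get_job_name builds the gitlab job name positionally: each field appends a literal "{N}" placeholder (via brace escaping) plus a matching argument. A self-contained sketch of the pattern, with made-up field values:

    item_idx = 0
    format_str = ""
    format_args = []

    for field in ["zlib", "1.2.13", "gcc@11.3.0", "linux-ubuntu20.04-x86_64"]:
        if format_str:
            format_str += " "
        format_str += "{{{0}}}".format(item_idx)  # "{{" escapes to "{", yielding "{N}"
        format_args.append(field)
        item_idx += 1

    print(format_str)                       # "{0} {1} {2} {3}"
    print(format_str.format(*format_args))  # "zlib 1.2.13 gcc@11.3.0 linux-ubuntu20.04-x86_64"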
@@ -153,49 +139,33 @@ def _add_dependency(spec_label, dep_label, deps):
    deps[spec_label].add(dep_label)


def _get_spec_dependencies(
    specs, deps, spec_labels, check_index_only=False, mirrors_to_check=None
):
    spec_deps_obj = _compute_spec_deps(
        specs, check_index_only=check_index_only, mirrors_to_check=mirrors_to_check
    )
def _get_spec_dependencies(specs, deps, spec_labels):
    spec_deps_obj = _compute_spec_deps(specs)

    if spec_deps_obj:
        dependencies = spec_deps_obj["dependencies"]
        specs = spec_deps_obj["specs"]

        for entry in specs:
            spec_labels[entry["label"]] = {
                "spec": entry["spec"],
                "needs_rebuild": entry["needs_rebuild"],
            }
            spec_labels[entry["label"]] = entry["spec"]

        for entry in dependencies:
            _add_dependency(entry["spec"], entry["depends"], deps)


def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
def stage_spec_jobs(specs):
    """Take a set of release specs and generate a list of "stages", where the
    jobs in any stage are dependent only on jobs in previous stages.  This
    allows us to maximize build parallelism within the gitlab-ci framework.

    Arguments:
        specs (Iterable): Specs to build
        check_index_only (bool): Regardless of whether DAG pruning is enabled,
            all configured mirrors are searched to see if binaries for specs
            are up to date on those mirrors.  This flag limits that search to
            the binary cache indices on those mirrors to speed the process up,
            even though there is no garantee the index is up to date.
        mirrors_to_checK: Optional mapping giving mirrors to check instead of
            any configured mirrors.

    Returns: A tuple of information objects describing the specs, dependencies
        and stages:

        spec_labels: A dictionary mapping the spec labels which are made of
            (pkg-name/hash-prefix), to objects containing "spec" and "needs_rebuild"
            keys.  The root spec is the spec of which this spec is a dependency
            and the spec is the formatted spec string for this spec.
        spec_labels: A dictionary mapping the spec labels (which are formatted
            as pkg-name/hash-prefix) to concrete specs.

        deps: A dictionary where the keys should also have appeared as keys in
            the spec_labels dictionary, and the values are the set of
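The staging this docstring describes is a layered topological sort: repeatedly peel off the labels whose dependencies are all satisfied. A minimal sketch of that loop, independent of Spack:

    def stage_jobs(deps):
        """deps maps each label to the set of labels it depends on."""
        remaining = {label: set(d) for label, d in deps.items()}
        stages = []
        while remaining:
            # Everything with no unsatisfied dependencies can build now.
            satisfied = {label for label, d in remaining.items() if not d}
            if not satisfied:
                raise ValueError("dependency cycle detected")
            stages.append(sorted(satisfied))
            remaining = {
                label: d - satisfied for label, d in remaining.items() if label not in satisfied
            }
        return stages

    # stage_jobs({"a": set(), "b": {"a"}, "c": {"a"}, "d": {"b", "c"}})
    # -> [["a"], ["b", "c"], ["d"]]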
@@ -224,13 +194,7 @@ def _remove_satisfied_deps(deps, satisfied_list):
    deps = {}
    spec_labels = {}

    _get_spec_dependencies(
        specs,
        deps,
        spec_labels,
        check_index_only=check_index_only,
        mirrors_to_check=mirrors_to_check,
    )
    _get_spec_dependencies(specs, deps, spec_labels)

    # Save the original deps, as we need to return them at the end of the
    # function.  In the while loop below, the "dependencies" variable is
@@ -256,24 +220,36 @@ def _remove_satisfied_deps(deps, satisfied_list):
    return spec_labels, deps, stages


def _print_staging_summary(spec_labels, dependencies, stages):
def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions):
    if not stages:
        return

    tty.msg("  Staging summary ([x] means a job needs rebuilding):")
    mirrors = spack.mirror.MirrorCollection(mirrors=mirrors_to_check)
    tty.msg("Checked the following mirrors for binaries:")
    for m in mirrors.values():
        tty.msg("  {0}".format(m.fetch_url))

    tty.msg("Staging summary ([x] means a job needs rebuilding):")
    for stage_index, stage in enumerate(stages):
        tty.msg("    stage {0} ({1} jobs):".format(stage_index, len(stage)))
        tty.msg("  stage {0} ({1} jobs):".format(stage_index, len(stage)))

        for job in sorted(stage):
            s = spec_labels[job]["spec"]
            s = spec_labels[job]
            rebuild = rebuild_decisions[job].rebuild
            reason = rebuild_decisions[job].reason
            reason_msg = " ({0})".format(reason) if reason else ""
            tty.msg(
                "      [{1}] {0} -> {2}".format(
                    job, "x" if spec_labels[job]["needs_rebuild"] else " ", _get_spec_string(s)
                "    [{1}] {0} -> {2}{3}".format(
                    job, "x" if rebuild else " ", _get_spec_string(s), reason_msg
                )
            )
            if rebuild_decisions[job].mirrors:
                tty.msg("      found on the following mirrors:")
                for murl in rebuild_decisions[job].mirrors:
                    tty.msg("        {0}".format(murl))


def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None):
def _compute_spec_deps(spec_list):
    """
    Computes all the dependencies for the spec(s) and generates a JSON
    object which provides both a list of unique spec names as well as a
@@ -337,12 +313,8 @@ def append_dep(s, d):
            tty.msg("Will not stage external pkg: {0}".format(s))
            continue

        up_to_date_mirrors = bindist.get_mirrors_for_spec(
            spec=s, mirrors_to_check=mirrors_to_check, index_only=check_index_only
        )

        skey = _spec_deps_key(s)
        spec_labels[skey] = {"spec": s, "needs_rebuild": not up_to_date_mirrors}
        spec_labels[skey] = s

        for d in s.dependencies(deptype=all):
            dkey = _spec_deps_key(d)
@@ -352,14 +324,8 @@ def append_dep(s, d):

            append_dep(skey, dkey)

    for spec_label, spec_holder in spec_labels.items():
        specs.append(
            {
                "label": spec_label,
                "spec": spec_holder["spec"],
                "needs_rebuild": spec_holder["needs_rebuild"],
            }
        )
    for spec_label, concrete_spec in spec_labels.items():
        specs.append({"label": spec_label, "spec": concrete_spec})

    deps_json_obj = {"specs": specs, "dependencies": dependencies}
@@ -371,26 +337,17 @@ def _spec_matches(spec, match_string):


def _format_job_needs(
    phase_name,
    strip_compilers,
    dep_jobs,
    osname,
    build_group,
    prune_dag,
    stage_spec_dict,
    enable_artifacts_buildcache,
    dep_jobs, osname, build_group, prune_dag, rebuild_decisions, enable_artifacts_buildcache
):
    needs_list = []
    for dep_job in dep_jobs:
        dep_spec_key = _spec_deps_key(dep_job)
        dep_spec_info = stage_spec_dict[dep_spec_key]
        rebuild = rebuild_decisions[dep_spec_key].rebuild

        if not prune_dag or dep_spec_info["needs_rebuild"]:
        if not prune_dag or rebuild:
            needs_list.append(
                {
                    "job": get_job_name(
                        phase_name, strip_compilers, dep_job, dep_job.architecture, build_group
                    ),
                    "job": get_job_name(dep_job, dep_job.architecture, build_group),
                    "artifacts": enable_artifacts_buildcache,
                }
            )
@@ -490,17 +447,12 @@ def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
    return affected_specs


def _build_jobs(phases, staged_phases):
    for phase in phases:
        phase_name = phase["name"]
        spec_labels, dependencies, stages = staged_phases[phase_name]

        for stage_jobs in stages:
            for spec_label in stage_jobs:
                spec_record = spec_labels[spec_label]
                release_spec = spec_record["spec"]
                release_spec_dag_hash = release_spec.dag_hash()
                yield release_spec, release_spec_dag_hash
def _build_jobs(spec_labels, stages):
    for stage_jobs in stages:
        for spec_label in stage_jobs:
            release_spec = spec_labels[spec_label]
            release_spec_dag_hash = release_spec.dag_hash()
            yield release_spec, release_spec_dag_hash


def _noop(x):
@@ -519,14 +471,21 @@ def _unpack_script(script_section, op=_noop):
    return script


class RebuildDecision(object):
    def __init__(self):
        self.rebuild = True
        self.mirrors = []
        self.reason = ""


class SpackCI:
    """Spack CI object used to generate intermediate representation
    used by the CI generator(s).
    """

    def __init__(self, ci_config, phases, staged_phases):
    def __init__(self, ci_config, spec_labels, stages):
        """Given the information from the ci section of the config
        and the job phases setup meta data needed for generating Spack
        and the staged jobs, set up meta data needed for generating Spack
        CI IR.
        """
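RebuildDecision replaces the old per-label needs_rebuild flag with a richer record (decision, reason, mirror hits). A hypothetical sketch of how such a record gets filled from a mirror query; find_in_mirrors is a stand-in callable, not a real Spack API:

    def decide(spec, find_in_mirrors):
        decision = RebuildDecision()
        hits = find_in_mirrors(spec)  # e.g. [{"mirror_url": "https://..."}] or []
        decision.rebuild = not hits
        if hits:
            decision.reason = "Pruned, found in mirrors"
            decision.mirrors = [h["mirror_url"] for h in hits]
        else:
            decision.reason = "Scheduled, not found anywhere"
        return decision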
@@ -541,9 +500,6 @@ def __init__(self, ci_config, phases, staged_phases):
            "enable-artifacts-buildcache": self.ci_config.get(
                "enable-artifacts-buildcache", False
            ),
            "bootstrap": self.ci_config.get(
                "bootstrap", []
            ),  # This is deprecated and should be removed
            "rebuild-index": self.ci_config.get("rebuild-index", True),
            "broken-specs-url": self.ci_config.get("broken-specs-url", None),
            "broken-tests-packages": self.ci_config.get("broken-tests-packages", []),
@@ -551,7 +507,7 @@ def __init__(self, ci_config, phases, staged_phases):
        }
        jobs = self.ir["jobs"]

        for spec, dag_hash in _build_jobs(phases, staged_phases):
        for spec, dag_hash in _build_jobs(spec_labels, stages):
            jobs[dag_hash] = self.__init_job(spec)

        for name in self.named_jobs:
@@ -873,25 +829,6 @@ def generate_gitlab_ci_yaml(
    if "temporary-storage-url-prefix" in ci_config:
        temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]

    bootstrap_specs = []
    phases = []
    if "bootstrap" in ci_config:
        for phase in ci_config["bootstrap"]:
            try:
                phase_name = phase.get("name")
                strip_compilers = phase.get("compiler-agnostic")
            except AttributeError:
                phase_name = phase
                strip_compilers = False
            phases.append({"name": phase_name, "strip-compilers": strip_compilers})

            for bs in env.spec_lists[phase_name]:
                bootstrap_specs.append(
                    {"spec": bs, "phase-name": phase_name, "strip-compilers": strip_compilers}
                )

    phases.append({"name": "specs", "strip-compilers": False})

    # If a remote mirror override (alternate buildcache destination) was
    # specified, add it here in case it has already built hashes we might
    # generate.
@@ -993,39 +930,13 @@ def generate_gitlab_ci_yaml(
        except bindist.FetchCacheError as e:
            tty.warn(e)

    staged_phases = {}
    try:
        for phase in phases:
            phase_name = phase["name"]
            if phase_name == "specs":
                # Anything in the "specs" of the environment are already
                # concretized by the block at the top of this method, so we
                # only need to find the concrete versions, and then avoid
                # re-concretizing them needlessly later on.
                concrete_phase_specs = [
                    concrete
                    for abstract, concrete in env.concretized_specs()
                    if abstract in env.spec_lists[phase_name]
                ]
            else:
                # Any specs lists in other definitions (but not in the
                # "specs") of the environment are not yet concretized so we
                # have to concretize them explicitly here.
                concrete_phase_specs = env.spec_lists[phase_name]
                with spack.concretize.disable_compiler_existence_check():
                    for phase_spec in concrete_phase_specs:
                        phase_spec.concretize()
            staged_phases[phase_name] = stage_spec_jobs(
                concrete_phase_specs,
                check_index_only=check_index_only,
                mirrors_to_check=mirrors_to_check,
            )
    finally:
        # Clean up remote mirror override if enabled
        if remote_mirror_override:
            spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
        if spack_pipeline_type == "spack_pull_request":
            spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())
    spec_labels, dependencies, stages = stage_spec_jobs(
        [
            concrete
            for abstract, concrete in env.concretized_specs()
            if abstract in env.spec_lists["specs"]
        ]
    )

    all_job_names = []
    output_object = {}
@@ -1048,276 +959,212 @@ def generate_gitlab_ci_yaml(
    else:
        broken_spec_urls = web_util.list_url(broken_specs_url)

    spack_ci = SpackCI(ci_config, phases, staged_phases)
    spack_ci = SpackCI(ci_config, spec_labels, stages)
    spack_ci_ir = spack_ci.generate_ir()

    for phase in phases:
        phase_name = phase["name"]
        strip_compilers = phase["strip-compilers"]
    rebuild_decisions = {}

        spec_labels, dependencies, stages = staged_phases[phase_name]
    for stage_jobs in stages:
        stage_name = "stage-{0}".format(stage_id)
        stage_names.append(stage_name)
        stage_id += 1

        for stage_jobs in stages:
            stage_name = "stage-{0}".format(stage_id)
            stage_names.append(stage_name)
            stage_id += 1
        for spec_label in stage_jobs:
            release_spec = spec_labels[spec_label]
            release_spec_dag_hash = release_spec.dag_hash()

            for spec_label in stage_jobs:
                spec_record = spec_labels[spec_label]
                release_spec = spec_record["spec"]
                release_spec_dag_hash = release_spec.dag_hash()
            spec_record = RebuildDecision()
            rebuild_decisions[spec_label] = spec_record

                if prune_untouched_packages:
                    if release_spec not in affected_specs:
                        tty.debug(
                            "Pruning {0}/{1}, untouched by change.".format(
                                release_spec.name, release_spec.dag_hash()[:7]
                            )
                        )
                        spec_record["needs_rebuild"] = False
                        continue

                job_object = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]

                if not job_object:
                    tty.warn("No match found for {0}, skipping it".format(release_spec))
            if prune_untouched_packages:
                if release_spec not in affected_specs:
                    spec_record.rebuild = False
                    spec_record.reason = "Pruned, untouched by change."
                    continue

                if spack_pipeline_type is not None:
                    # For spack pipelines "public" and "protected" are reserved tags
                    job_object["tags"] = _remove_reserved_tags(job_object.get("tags", []))
                    if spack_pipeline_type == "spack_protected_branch":
                        job_object["tags"].extend(["protected"])
                    elif spack_pipeline_type == "spack_pull_request":
                        job_object["tags"].extend(["public"])
            up_to_date_mirrors = bindist.get_mirrors_for_spec(
                spec=release_spec, mirrors_to_check=mirrors_to_check, index_only=check_index_only
            )

                if "script" not in job_object:
                    raise AttributeError
            spec_record.rebuild = not up_to_date_mirrors
            if up_to_date_mirrors:
                spec_record.reason = "Pruned, found in mirrors"
                spec_record.mirrors = [m["mirror_url"] for m in up_to_date_mirrors]
            else:
                spec_record.reason = "Scheduled, not found anywhere"

                def main_script_replacements(cmd):
                    return cmd.replace("{env_dir}", rel_concrete_env_dir)
            job_object = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]

                job_object["script"] = _unpack_script(
                    job_object["script"], op=main_script_replacements
                )
            if not job_object:
                tty.warn("No match found for {0}, skipping it".format(release_spec))
                continue

                if "before_script" in job_object:
                    job_object["before_script"] = _unpack_script(job_object["before_script"])
            if spack_pipeline_type is not None:
                # For spack pipelines "public" and "protected" are reserved tags
                job_object["tags"] = _remove_reserved_tags(job_object.get("tags", []))
                if spack_pipeline_type == "spack_protected_branch":
                    job_object["tags"].extend(["protected"])
                elif spack_pipeline_type == "spack_pull_request":
                    job_object["tags"].extend(["public"])

                if "after_script" in job_object:
                    job_object["after_script"] = _unpack_script(job_object["after_script"])
            if "script" not in job_object:
                raise AttributeError

                osname = str(release_spec.architecture)
                job_name = get_job_name(
                    phase_name, strip_compilers, release_spec, osname, build_group
                )
            def main_script_replacements(cmd):
                return cmd.replace("{env_dir}", rel_concrete_env_dir)

                compiler_action = "NONE"
                if len(phases) > 1:
                    compiler_action = "FIND_ANY"
                if _is_main_phase(phase_name):
                    compiler_action = "INSTALL_MISSING"
            job_object["script"] = _unpack_script(
                job_object["script"], op=main_script_replacements
            )

                job_vars = job_object.setdefault("variables", {})
                job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
                job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
                job_vars["SPACK_COMPILER_ACTION"] = compiler_action
            if "before_script" in job_object:
                job_object["before_script"] = _unpack_script(job_object["before_script"])

                job_object["needs"] = []
                if spec_label in dependencies:
                    if enable_artifacts_buildcache:
                        # Get dependencies transitively, so they're all
                        # available in the artifacts buildcache.
                        dep_jobs = [d for d in release_spec.traverse(deptype=all, root=False)]
                    else:
                        # In this case, "needs" is only used for scheduling
                        # purposes, so we only get the direct dependencies.
                        dep_jobs = []
                        for dep_label in dependencies[spec_label]:
                            dep_jobs.append(spec_labels[dep_label]["spec"])
            if "after_script" in job_object:
                job_object["after_script"] = _unpack_script(job_object["after_script"])

                job_object["needs"].extend(
                    _format_job_needs(
                        phase_name,
                        strip_compilers,
                        dep_jobs,
                        osname,
                        build_group,
                        prune_dag,
                        spec_labels,
                        enable_artifacts_buildcache,
                    )
                )
            osname = str(release_spec.architecture)
            job_name = get_job_name(release_spec, osname, build_group)

                rebuild_spec = spec_record["needs_rebuild"]

                # This next section helps gitlab make sure the right
                # bootstrapped compiler exists in the artifacts buildcache by
                # creating an artificial dependency between this spec and its
                # compiler.  So, if we are in the main phase, and if the
                # compiler we are supposed to use is listed in any of the
                # bootstrap spec lists, then we will add more dependencies to
                # the job (that compiler and maybe it's dependencies as well).
                if _is_main_phase(phase_name):
                    spec_arch_family = release_spec.architecture.target.microarchitecture.family
                    compiler_pkg_spec = compilers.pkg_spec_for_compiler(release_spec.compiler)
                    for bs in bootstrap_specs:
                        c_spec = bs["spec"]
                        bs_arch = c_spec.architecture
                        bs_arch_family = bs_arch.target.microarchitecture.family
                        if (
                            c_spec.intersects(compiler_pkg_spec)
                            and bs_arch_family == spec_arch_family
                        ):
                            # We found the bootstrap compiler this release spec
                            # should be built with, so for DAG scheduling
                            # purposes, we will at least add the compiler spec
                            # to the jobs "needs".  But if artifact buildcache
                            # is enabled, we'll have to add all transtive deps
                            # of the compiler as well.

                            # Here we check whether the bootstrapped compiler
                            # needs to be rebuilt.  Until compilers are proper
                            # dependencies, we artificially force the spec to
                            # be rebuilt if the compiler targeted to build it
                            # needs to be rebuilt.
                            bs_specs, _, _ = staged_phases[bs["phase-name"]]
                            c_spec_key = _spec_deps_key(c_spec)
                            rbld_comp = bs_specs[c_spec_key]["needs_rebuild"]
                            rebuild_spec = rebuild_spec or rbld_comp
                            # Also update record so dependents do not fail to
                            # add this spec to their "needs"
                            spec_record["needs_rebuild"] = rebuild_spec

                            dep_jobs = [c_spec]
                            if enable_artifacts_buildcache:
                                dep_jobs = [d for d in c_spec.traverse(deptype=all)]

                            job_object["needs"].extend(
                                _format_job_needs(
                                    bs["phase-name"],
                                    bs["strip-compilers"],
                                    dep_jobs,
                                    str(bs_arch),
                                    build_group,
                                    prune_dag,
                                    bs_specs,
                                    enable_artifacts_buildcache,
                                )
                            )
                        else:
                            debug_msg = "".join(
                                [
                                    "Considered compiler {0} for spec ",
                                    "{1}, but rejected it either because it was ",
                                    "not the compiler required by the spec, or ",
                                    "because the target arch families of the ",
                                    "spec and the compiler did not match",
                                ]
                            ).format(c_spec, release_spec)
                            tty.debug(debug_msg)

                if prune_dag and not rebuild_spec and not copy_only_pipeline:
                    tty.debug(
                        "Pruning {0}/{1}, does not need rebuild.".format(
                            release_spec.name, release_spec.dag_hash()
                        )
                    )
                    continue

                if broken_spec_urls is not None and release_spec_dag_hash in broken_spec_urls:
                    known_broken_specs_encountered.append(release_spec_dag_hash)

                # Only keep track of these if we are copying rebuilt cache entries
                if spack_buildcache_copy:
                    # TODO: This assumes signed version of the spec
                    buildcache_copies[release_spec_dag_hash] = [
                        {
                            "src": url_util.join(
                                buildcache_copy_src_prefix,
                                bindist.build_cache_relative_path(),
                                bindist.tarball_name(release_spec, ".spec.json.sig"),
                            ),
                            "dest": url_util.join(
                                buildcache_copy_dest_prefix,
                                bindist.build_cache_relative_path(),
                                bindist.tarball_name(release_spec, ".spec.json.sig"),
                            ),
                        },
                        {
                            "src": url_util.join(
                                buildcache_copy_src_prefix,
                                bindist.build_cache_relative_path(),
                                bindist.tarball_path_name(release_spec, ".spack"),
                            ),
                            "dest": url_util.join(
                                buildcache_copy_dest_prefix,
                                bindist.build_cache_relative_path(),
                                bindist.tarball_path_name(release_spec, ".spack"),
                            ),
                        },
                    ]

                if artifacts_root:
                    job_object["needs"].append(
                        {"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
                    )

                job_vars["SPACK_SPEC_NEEDS_REBUILD"] = str(rebuild_spec)

                if cdash_handler:
                    cdash_handler.current_spec = release_spec
                    build_name = cdash_handler.build_name
                    all_job_names.append(build_name)
                    job_vars["SPACK_CDASH_BUILD_NAME"] = build_name

                    build_stamp = cdash_handler.build_stamp
                    job_vars["SPACK_CDASH_BUILD_STAMP"] = build_stamp

                job_object["artifacts"] = spack.config.merge_yaml(
                    job_object.get("artifacts", {}),
                    {
                        "when": "always",
                        "paths": [
                            rel_job_log_dir,
                            rel_job_repro_dir,
                            rel_job_test_dir,
                            rel_user_artifacts_dir,
                        ],
                    },
                )
            job_vars = job_object.setdefault("variables", {})
            job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
            job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name

            job_object["needs"] = []
            if spec_label in dependencies:
                if enable_artifacts_buildcache:
                    bc_root = os.path.join(local_mirror_dir, "build_cache")
                    job_object["artifacts"]["paths"].extend(
                        [
                            os.path.join(bc_root, p)
                            for p in [
                                bindist.tarball_name(release_spec, ".spec.json"),
                                bindist.tarball_directory_name(release_spec),
                            ]
                        ]
                    # Get dependencies transitively, so they're all
                    # available in the artifacts buildcache.
                    dep_jobs = [d for d in release_spec.traverse(deptype=all, root=False)]
                else:
                    # In this case, "needs" is only used for scheduling
                    # purposes, so we only get the direct dependencies.
                    dep_jobs = []
                    for dep_label in dependencies[spec_label]:
                        dep_jobs.append(spec_labels[dep_label])

                job_object["needs"].extend(
                    _format_job_needs(
                        dep_jobs,
                        osname,
                        build_group,
                        prune_dag,
                        rebuild_decisions,
                        enable_artifacts_buildcache,
                    )
                )
job_object["stage"] = stage_name
|
||||
job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
|
||||
job_object["interruptible"] = True
|
||||
rebuild_spec = spec_record.rebuild
|
||||
|
||||
length_needs = len(job_object["needs"])
|
||||
if length_needs > max_length_needs:
|
||||
max_length_needs = length_needs
|
||||
max_needs_job = job_name
|
||||
if not rebuild_spec and not copy_only_pipeline:
|
||||
if prune_dag:
|
||||
spec_record.reason = "Pruned, up-to-date"
|
||||
continue
|
||||
else:
|
||||
# DAG pruning is disabled, force the spec to rebuild. The
|
||||
# record still contains any mirrors on which the spec
|
||||
# may have been found, so we can print them in the staging
|
||||
# summary.
|
||||
spec_record.rebuild = True
|
||||
spec_record.reason = "Scheduled, DAG pruning disabled"
|
||||
|
||||
if not copy_only_pipeline:
|
||||
output_object[job_name] = job_object
|
||||
job_id += 1
|
||||
if broken_spec_urls is not None and release_spec_dag_hash in broken_spec_urls:
|
||||
known_broken_specs_encountered.append(release_spec_dag_hash)
|
||||
|
||||
# Only keep track of these if we are copying rebuilt cache entries
|
||||
if spack_buildcache_copy:
|
||||
# TODO: This assumes signed version of the spec
|
||||
buildcache_copies[release_spec_dag_hash] = [
|
||||
{
|
||||
"src": url_util.join(
|
||||
buildcache_copy_src_prefix,
|
||||
bindist.build_cache_relative_path(),
|
||||
bindist.tarball_name(release_spec, ".spec.json.sig"),
|
||||
),
|
||||
"dest": url_util.join(
|
||||
buildcache_copy_dest_prefix,
|
||||
bindist.build_cache_relative_path(),
|
||||
bindist.tarball_name(release_spec, ".spec.json.sig"),
|
||||
),
|
||||
},
|
||||
{
|
||||
"src": url_util.join(
|
||||
buildcache_copy_src_prefix,
|
||||
bindist.build_cache_relative_path(),
|
||||
bindist.tarball_path_name(release_spec, ".spack"),
|
||||
),
|
||||
"dest": url_util.join(
|
||||
buildcache_copy_dest_prefix,
|
||||
bindist.build_cache_relative_path(),
|
||||
bindist.tarball_path_name(release_spec, ".spack"),
|
||||
),
|
||||
},
|
||||
]
|
||||
|
||||
if artifacts_root:
|
||||
job_object["needs"].append(
|
||||
{"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
|
||||
)
|
||||
|
||||
# Let downstream jobs know whether the spec needed rebuilding, regardless
|
||||
# whether DAG pruning was enabled or not.
|
||||
job_vars["SPACK_SPEC_NEEDS_REBUILD"] = str(rebuild_spec)
|
||||
|
||||
if cdash_handler:
|
||||
cdash_handler.current_spec = release_spec
|
||||
build_name = cdash_handler.build_name
|
||||
all_job_names.append(build_name)
|
||||
job_vars["SPACK_CDASH_BUILD_NAME"] = build_name
|
||||
|
||||
build_stamp = cdash_handler.build_stamp
|
||||
job_vars["SPACK_CDASH_BUILD_STAMP"] = build_stamp
|
||||
|
||||
job_object["artifacts"] = spack.config.merge_yaml(
|
||||
job_object.get("artifacts", {}),
|
||||
{
|
||||
"when": "always",
|
||||
"paths": [
|
||||
rel_job_log_dir,
|
||||
rel_job_repro_dir,
|
||||
rel_job_test_dir,
|
||||
rel_user_artifacts_dir,
|
||||
],
|
||||
},
|
||||
)
|
||||
|
||||
if enable_artifacts_buildcache:
|
||||
bc_root = os.path.join(local_mirror_dir, "build_cache")
|
||||
job_object["artifacts"]["paths"].extend(
|
||||
[
|
||||
os.path.join(bc_root, p)
|
||||
for p in [
|
||||
bindist.tarball_name(release_spec, ".spec.json"),
|
||||
bindist.tarball_directory_name(release_spec),
|
||||
]
|
||||
]
|
||||
)
|
||||
|
||||
job_object["stage"] = stage_name
|
||||
job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
|
||||
job_object["interruptible"] = True
|
||||
|
||||
length_needs = len(job_object["needs"])
|
||||
if length_needs > max_length_needs:
|
||||
max_length_needs = length_needs
|
||||
max_needs_job = job_name
|
||||
|
||||
if not copy_only_pipeline:
|
||||
output_object[job_name] = job_object
|
||||
job_id += 1
|
||||
|
||||
if print_summary:
|
||||
for phase in phases:
|
||||
phase_name = phase["name"]
|
||||
tty.msg('Stages for phase "{0}"'.format(phase_name))
|
||||
phase_stages = staged_phases[phase_name]
|
||||
_print_staging_summary(*phase_stages)
|
||||
_print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions)
|
||||
|
||||
# Clean up remote mirror override if enabled
|
||||
if remote_mirror_override:
|
||||
spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
|
||||
if spack_pipeline_type == "spack_pull_request":
|
||||
spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())
|
||||
|
||||
tty.debug("{0} build jobs generated in {1} stages".format(job_id, stage_id))
|
||||
|
||||
@@ -1576,44 +1423,6 @@ def can_verify_binaries():
    return len(gpg_util.public_keys()) >= 1


def configure_compilers(compiler_action, scope=None):
    """Depending on the compiler_action parameter, either turn on the
    install_missing_compilers config option, or find spack compilers,
    or do nothing.  This is used from rebuild jobs in bootstrapping
    pipelines, where in the bootsrapping phase we would pass
    FIND_ANY in case of compiler-agnostic bootstrapping, while in the
    spec building phase we would pass INSTALL_MISSING in order to get
    spack to use the compiler which was built in the previous phase and
    is now sitting in the binary mirror.

    Arguments:
        compiler_action (str): 'FIND_ANY', 'INSTALL_MISSING' have meanings
            described above.  Any other value essentially results in a no-op.
        scope (spack.config.ConfigScope): Optional.  The scope in which to look for
            compilers, in case 'FIND_ANY' was provided.
    """
    if compiler_action == "INSTALL_MISSING":
        tty.debug("Make sure bootstrapped compiler will be installed")
        config = cfg.get("config")
        config["install_missing_compilers"] = True
        cfg.set("config", config)
    elif compiler_action == "FIND_ANY":
        tty.debug("Just find any available compiler")
        find_args = ["find"]
        if scope:
            find_args.extend(["--scope", scope])
        output = spack_compiler(*find_args)
        tty.debug("spack compiler find")
        tty.debug(output)
        output = spack_compiler("list")
        tty.debug("spack compiler list")
        tty.debug(output)
    else:
        tty.debug("No compiler action to be taken")

    return None


def _push_mirror_contents(input_spec, sign_binaries, mirror_url):
    """Unchecked version of the public API, for easier mocking"""
    unsigned = not sign_binaries
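The configure_compilers function removed above is a three-way dispatch on a string action. A stripped-down sketch of the same control flow, assuming only the standard library (the real code shells out via Spack's own command wrappers, not subprocess):

    import subprocess

    def handle_compiler_action(action, config, scope=None):
        if action == "INSTALL_MISSING":
            # Build with a bootstrapped compiler: ask spack to install it.
            config["install_missing_compilers"] = True
        elif action == "FIND_ANY":
            # Compiler-agnostic bootstrapping: register whatever is available.
            cmd = ["spack", "compiler", "find"]
            if scope:
                cmd += ["--scope", scope]
            subprocess.run(cmd, check=False)
        # Any other value: deliberately a no-op.
        return config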
@@ -43,13 +43,6 @@ def setup_parser(subparser):
    subparsers = subparser.add_subparsers(help="buildcache sub-commands")

    push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
    # TODO: remove from Spack 0.21
    push.add_argument(
        "-r",
        "--rel",
        action="store_true",
        help="make all rpaths relative before creating tarballs. (deprecated)",
    )
    push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists.")
    push.add_argument(
        "-u", "--unsigned", action="store_true", help="push unsigned buildcache tarballs"
@@ -63,37 +56,7 @@ def setup_parser(subparser):
    push.add_argument(
        "-k", "--key", metavar="key", type=str, default=None, help="Key for signing."
    )
    output = push.add_mutually_exclusive_group(required=False)
    # TODO: remove from Spack 0.21
    output.add_argument(
        "-d",
        "--directory",
        metavar="directory",
        dest="mirror_flag",
        type=arguments.mirror_directory,
        help="local directory where buildcaches will be written. (deprecated)",
    )
    # TODO: remove from Spack 0.21
    output.add_argument(
        "-m",
        "--mirror-name",
        metavar="mirror-name",
        dest="mirror_flag",
        type=arguments.mirror_name,
        help="name of the mirror where buildcaches will be written. (deprecated)",
    )
    # TODO: remove from Spack 0.21
    output.add_argument(
        "--mirror-url",
        metavar="mirror-url",
        dest="mirror_flag",
        type=arguments.mirror_url,
        help="URL of the mirror where buildcaches will be written. (deprecated)",
    )
    # Unfortunately we cannot add this to the mutually exclusive group above,
    # because we have further positional arguments.
    # TODO: require from Spack 0.21
    push.add_argument("mirror", type=str, help="Mirror name, path, or URL.", nargs="?")
    push.add_argument("mirror", type=str, help="Mirror name, path, or URL.")
    push.add_argument(
        "--update-index",
        "--rebuild-index",
@@ -127,13 +90,6 @@ def setup_parser(subparser):
    install.add_argument(
        "-m", "--multiple", action="store_true", help="allow all matching packages "
    )
    # TODO: remove from Spack 0.21
    install.add_argument(
        "-a",
        "--allow-root",
        action="store_true",
        help="allow install root string in binary files after RPATH substitution. (deprecated)",
    )
    install.add_argument(
        "-u",
        "--unsigned",
@@ -268,75 +224,21 @@ def setup_parser(subparser):
    # Sync buildcache entries from one mirror to another
    sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
    sync.add_argument(
        "--manifest-glob",
        default=None,
        help="A quoted glob pattern identifying copy manifest files",
        "--manifest-glob", help="A quoted glob pattern identifying copy manifest files"
    )
    source = sync.add_mutually_exclusive_group(required=False)
    # TODO: remove in Spack 0.21
    source.add_argument(
        "--src-directory",
        metavar="DIRECTORY",
        dest="src_mirror_flag",
        type=arguments.mirror_directory,
        help="Source mirror as a local file path (deprecated)",
    )
    # TODO: remove in Spack 0.21
    source.add_argument(
        "--src-mirror-name",
        metavar="MIRROR_NAME",
        dest="src_mirror_flag",
        type=arguments.mirror_name,
        help="Name of the source mirror (deprecated)",
    )
    # TODO: remove in Spack 0.21
    source.add_argument(
        "--src-mirror-url",
        metavar="MIRROR_URL",
        dest="src_mirror_flag",
        type=arguments.mirror_url,
        help="URL of the source mirror (deprecated)",
    )
    # TODO: only support this in 0.21
    source.add_argument(
    sync.add_argument(
        "src_mirror",
        metavar="source mirror",
        type=arguments.mirror_name_or_url,
        help="Source mirror name, path, or URL",
        nargs="?",
        help="Source mirror name, path, or URL",
    )
    dest = sync.add_mutually_exclusive_group(required=False)
    # TODO: remove in Spack 0.21
    dest.add_argument(
        "--dest-directory",
        metavar="DIRECTORY",
        dest="dest_mirror_flag",
        type=arguments.mirror_directory,
        help="Destination mirror as a local file path (deprecated)",
    )
    # TODO: remove in Spack 0.21
    dest.add_argument(
        "--dest-mirror-name",
        metavar="MIRROR_NAME",
        type=arguments.mirror_name,
        dest="dest_mirror_flag",
        help="Name of the destination mirror (deprecated)",
    )
    # TODO: remove in Spack 0.21
    dest.add_argument(
        "--dest-mirror-url",
        metavar="MIRROR_URL",
        dest="dest_mirror_flag",
        type=arguments.mirror_url,
        help="URL of the destination mirror (deprecated)",
    )
    # TODO: only support this in 0.21
    dest.add_argument(
    sync.add_argument(
        "dest_mirror",
        metavar="destination mirror",
        type=arguments.mirror_name_or_url,
        help="Destination mirror name, path, or URL",
        nargs="?",
        help="Destination mirror name, path, or URL",
    )
    sync.set_defaults(func=sync_fn)
@@ -344,39 +246,8 @@ def setup_parser(subparser):
    update_index = subparsers.add_parser(
        "update-index", aliases=["rebuild-index"], help=update_index_fn.__doc__
    )
    update_index_out = update_index.add_mutually_exclusive_group(required=True)
    # TODO: remove in Spack 0.21
    update_index_out.add_argument(
        "-d",
        "--directory",
        metavar="directory",
        dest="mirror_flag",
        type=arguments.mirror_directory,
        help="local directory where buildcaches will be written (deprecated)",
    )
    # TODO: remove in Spack 0.21
    update_index_out.add_argument(
        "-m",
        "--mirror-name",
        metavar="mirror-name",
        dest="mirror_flag",
        type=arguments.mirror_name,
        help="name of the mirror where buildcaches will be written (deprecated)",
    )
    # TODO: remove in Spack 0.21
    update_index_out.add_argument(
        "--mirror-url",
        metavar="mirror-url",
        dest="mirror_flag",
        type=arguments.mirror_url,
        help="URL of the mirror where buildcaches will be written (deprecated)",
    )
    # TODO: require from Spack 0.21
    update_index_out.add_argument(
        "mirror",
        type=arguments.mirror_name_or_url,
        help="Destination mirror name, path, or URL",
        nargs="?",
    update_index.add_argument(
        "mirror", type=arguments.mirror_name_or_url, help="Destination mirror name, path, or URL"
    )
    update_index.add_argument(
        "-k",
@@ -436,32 +307,12 @@ def _concrete_spec_from_args(args):

def push_fn(args):
    """create a binary package and push it to a mirror"""
    if args.mirror_flag:
        mirror = args.mirror_flag
    elif not args.mirror:
        raise ValueError("No mirror provided")
    else:
        mirror = arguments.mirror_name_or_url(args.mirror)

    if args.mirror_flag:
        tty.warn(
            "Using flags to specify mirrors is deprecated and will be removed in "
            "Spack 0.21, use positional arguments instead."
        )

    if args.rel:
        tty.warn("The --rel flag is deprecated and will be removed in Spack 0.21")

    # TODO: remove this in 0.21. If we have mirror_flag, the first
    # spec is in the positional mirror arg due to argparse limitations.
    input_specs = args.specs
    if args.mirror_flag and args.mirror:
        input_specs.insert(0, args.mirror)
    mirror = arguments.mirror_name_or_url(args.mirror)

    url = mirror.push_url

    specs = bindist.specs_to_be_packaged(
        _matching_specs(input_specs, args.spec_file),
        _matching_specs(args.specs, args.spec_file),
        root="package" in args.things_to_install,
        dependencies="dependencies" in args.things_to_install,
    )
@@ -486,7 +337,6 @@ def push_fn(args):
        url,
        bindist.PushOptions(
            force=args.force,
            relative=args.rel,
            unsigned=args.unsigned,
            allow_root=args.allow_root,
            key=args.key,
@@ -524,9 +374,6 @@ def install_fn(args):
    if not args.specs:
        tty.die("a spec argument is required to install from a buildcache")

    if args.allow_root:
        tty.warn("The --allow-root flag is deprecated and will be removed in Spack 0.21")

    query = bindist.BinaryCacheQuery(all_architectures=args.otherarch)
    matches = spack.store.find(args.specs, multiple=args.multiple, query_fn=query)
    for match in matches:
@@ -710,21 +557,11 @@ def sync_fn(args):
        manifest_copy(glob.glob(args.manifest_glob))
        return 0

    # If no manifest_glob, require a source and dest mirror.
    # TODO: Simplify in Spack 0.21
    if not (args.src_mirror_flag or args.src_mirror) or not (
        args.dest_mirror_flag or args.dest_mirror
    ):
        raise ValueError("Source and destination mirror are required.")
    if args.src_mirror is None or args.dest_mirror is None:
        tty.die("Provide mirrors to sync from and to.")

    if args.src_mirror_flag or args.dest_mirror_flag:
        tty.warn(
            "Using flags to specify mirrors is deprecated and will be removed in "
            "Spack 0.21, use positional arguments instead."
        )

    src_mirror = args.src_mirror_flag if args.src_mirror_flag else args.src_mirror
    dest_mirror = args.dest_mirror_flag if args.dest_mirror_flag else args.dest_mirror
    src_mirror = args.src_mirror
    dest_mirror = args.dest_mirror

    src_mirror_url = src_mirror.fetch_url
    dest_mirror_url = dest_mirror.push_url
@@ -803,13 +640,7 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):

def update_index_fn(args):
    """Update a buildcache index."""
    if args.mirror_flag:
        tty.warn(
            "Using flags to specify mirrors is deprecated and will be removed in "
            "Spack 0.21, use positional arguments instead."
        )
    mirror = args.mirror_flag if args.mirror_flag else args.mirror
    update_index(mirror, update_keys=args.keys)
    update_index(args.mirror, update_keys=args.keys)


def buildcache(parser, args):
@@ -274,7 +274,6 @@ def ci_rebuild(args):
    signing_key = os.environ.get("SPACK_SIGNING_KEY")
    job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME")
    job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH")
    compiler_action = os.environ.get("SPACK_COMPILER_ACTION")
    spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE")
    remote_mirror_override = os.environ.get("SPACK_REMOTE_MIRROR_OVERRIDE")
    remote_mirror_url = os.environ.get("SPACK_REMOTE_MIRROR_URL")
@@ -295,7 +294,6 @@ def ci_rebuild(args):
    tty.debug("pipeline_artifacts_dir = {0}".format(pipeline_artifacts_dir))
    tty.debug("remote_mirror_url = {0}".format(remote_mirror_url))
    tty.debug("job_spec_pkg_name = {0}".format(job_spec_pkg_name))
    tty.debug("compiler_action = {0}".format(compiler_action))

    # Query the environment manifest to find out whether we're reporting to a
    # CDash instance, and if so, gather some information from the manifest to
@@ -411,14 +409,6 @@ def ci_rebuild(args):
    if signing_key:
        spack_ci.import_signing_key(signing_key)

    # Depending on the specifics of this job, we might need to turn on the
    # "config:install_missing compilers" option (to build this job spec
    # with a bootstrapped compiler), or possibly run "spack compiler find"
    # (to build a bootstrap compiler or one of its deps in a
    # compiler-agnostic way), or maybe do nothing at all (to build a spec
    # using a compiler already installed on the target system).
    spack_ci.configure_compilers(compiler_action)

    # Write this job's spec json into the reproduction directory, and it will
    # also be used in the generated "spack install" command to install the spec
    tty.debug("job concrete spec path: {0}".format(job_spec_json_path))
@@ -36,7 +36,10 @@ def shell_init_instructions(cmd, equivalent):
        "  source %s/setup-env.fish" % spack.paths.share_path,
        "",
        color.colorize("@*c{For Windows batch:}"),
        "  source %s/spack_cmd.bat" % spack.paths.share_path,
        "  %s\\spack_cmd.bat" % spack.paths.bin_path,
        "",
        color.colorize("@*c{For PowerShell:}"),
        "  %s\\setup-env.ps1" % spack.paths.share_path,
        "",
        "Or, if you do not want to use shell support, run "
        + ("one of these" if shell_specific else "this")
@@ -50,6 +53,7 @@ def shell_init_instructions(cmd, equivalent):
            equivalent.format(sh_arg="--csh ") + "  # csh/tcsh",
            equivalent.format(sh_arg="--fish") + "  # fish",
            equivalent.format(sh_arg="--bat ") + "  # batch",
            equivalent.format(sh_arg="--pwsh") + "  # powershell",
        ]
    else:
        msg += ["  " + equivalent]

@@ -349,7 +349,7 @@ def install_status():
        "-I",
        "--install-status",
        action="store_true",
        default=False,
        default=True,
        help="show install status of packages. packages can be: "
        "installed [+], missing and needed by an installed package [-], "
        "installed in and upstream instance [^], "
@@ -357,6 +357,17 @@ def install_status():
    )


@arg
def no_install_status():
    return Args(
        "--no-install-status",
        dest="install_status",
        action="store_false",
        default=True,
        help="do not show install status annotations",
    )


@arg
def no_checksum():
    return Args(
@@ -715,7 +715,7 @@ def __call__(self, stage, url):
            output = tar("--exclude=*/*/*", "-tf", stage.archive_file, output=str)
        except ProcessError:
            output = ""
        lines = output.split("\n")
        lines = output.splitlines()

        # Determine the build system based on the files contained
        # in the archive.
@@ -86,6 +86,13 @@ def env_activate_setup_parser(subparser):
        const="bat",
        help="print bat commands to activate the environment",
    )
    shells.add_argument(
        "--pwsh",
        action="store_const",
        dest="shell",
        const="pwsh",
        help="print powershell commands to activate environment",
    )

    view_options = subparser.add_mutually_exclusive_group()
    view_options.add_argument(
@@ -44,7 +44,11 @@ def setup_parser(subparser):
    )

    # Below are arguments w.r.t. spec display (like spack spec)
    arguments.add_common_arguments(subparser, ["long", "very_long", "install_status"])
    arguments.add_common_arguments(subparser, ["long", "very_long"])

    install_status_group = subparser.add_mutually_exclusive_group()
    arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])

    subparser.add_argument(
        "-y",
        "--yaml",

@@ -31,7 +31,11 @@ def setup_parser(subparser):
    for further documentation regarding the spec syntax, see:
        spack help --spec
    """
    arguments.add_common_arguments(subparser, ["long", "very_long", "install_status"])
    arguments.add_common_arguments(subparser, ["long", "very_long"])

    install_status_group = subparser.add_mutually_exclusive_group()
    arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])

    format_group = subparser.add_mutually_exclusive_group()
    format_group.add_argument(
        "-y",
@@ -164,7 +164,10 @@ def entries_to_specs(entries):
            continue
        parent_spec = spec_dict[entry["hash"]]
        dep_spec = spec_dict[dep_hash]
        parent_spec._add_dependency(dep_spec, deptypes=deptypes)
        parent_spec._add_dependency(dep_spec, deptypes=deptypes, virtuals=())

    for spec in spec_dict.values():
        spack.spec.reconstruct_virtuals_on_edges(spec)

    return spec_dict
@@ -60,7 +60,7 @@
# DB version.  This is stuck in the DB file to track changes in format.
# Increment by one when the database format changes.
# Versions before 5 were not integers.
_db_version = vn.Version("6")
_db_version = vn.Version("7")

# For any version combinations here, skip reindex when upgrading.
# Reindexing can take considerable time and is not always necessary.
@@ -72,6 +72,7 @@
    # version is saved to disk the first time the DB is written.
    (vn.Version("0.9.3"), vn.Version("5")),
    (vn.Version("5"), vn.Version("6")),
    (vn.Version("6"), vn.Version("7")),
]

# Default timeout for spack database locks in seconds or None (no timeout).
@@ -105,7 +106,11 @@


def reader(version):
    reader_cls = {vn.Version("5"): spack.spec.SpecfileV1, vn.Version("6"): spack.spec.SpecfileV3}
    reader_cls = {
        vn.Version("5"): spack.spec.SpecfileV1,
        vn.Version("6"): spack.spec.SpecfileV3,
        vn.Version("7"): spack.spec.SpecfileV4,
    }
    return reader_cls[version]
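Bumping _db_version to 7 only works because reader() keeps a parser registered for every historical version. A self-contained stand-in for that dispatch, with placeholder classes and a clearer error than the bare KeyError:

    class SpecfileV1: pass
    class SpecfileV3: pass
    class SpecfileV4: pass

    _READERS = {"5": SpecfileV1, "6": SpecfileV3, "7": SpecfileV4}

    def reader(version):
        try:
            return _READERS[str(version)]
        except KeyError:
            raise ValueError("no specfile reader registered for DB version {0}".format(version))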
@@ -743,7 +748,9 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
            spec_node_dict = spec_node_dict[spec.name]
        if "dependencies" in spec_node_dict:
            yaml_deps = spec_node_dict["dependencies"]
            for dname, dhash, dtypes, _ in spec_reader.read_specfile_dep_specs(yaml_deps):
            for dname, dhash, dtypes, _, virtuals in spec_reader.read_specfile_dep_specs(
                yaml_deps
            ):
                # It is important that we always check upstream installations
                # in the same order, and that we always check the local
                # installation first: if a downstream Spack installs a package
@@ -766,7 +773,7 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
                    tty.warn(msg)
                    continue

                spec._add_dependency(child, deptypes=dtypes)
                spec._add_dependency(child, deptypes=dtypes, virtuals=virtuals)

    def _read_from_file(self, filename):
        """Fill database from file, do not maintain old data.
@@ -1172,7 +1179,7 @@ def _add(
        for dep in spec.edges_to_dependencies(deptype=_tracked_deps):
            dkey = dep.spec.dag_hash()
            upstream, record = self.query_by_spec_hash(dkey)
            new_spec._add_dependency(record.spec, deptypes=dep.deptypes)
            new_spec._add_dependency(record.spec, deptypes=dep.deptypes, virtuals=dep.virtuals)
            if not upstream:
                record.ref_count += 1
@@ -125,7 +125,7 @@ def default_manifest_yaml():
valid_environment_name_re = r"^\w[\w-]*$"

#: version of the lockfile format. Must increase monotonically.
lockfile_format_version = 4
lockfile_format_version = 5


READER_CLS = {
@@ -133,6 +133,7 @@ def default_manifest_yaml():
    2: spack.spec.SpecfileV1,
    3: spack.spec.SpecfileV2,
    4: spack.spec.SpecfileV3,
    5: spack.spec.SpecfileV4,
}


@@ -1548,12 +1549,13 @@ def _concretize_separately(self, tests=False):
        for h in self.specs_by_hash:
            current_spec, computed_spec = self.specs_by_hash[h], by_hash[h]
            for node in computed_spec.traverse():
                test_deps = node.dependencies(deptype="test")
                for test_dependency in test_deps:
                test_edges = node.edges_to_dependencies(deptype="test")
                for current_edge in test_edges:
                    test_dependency = current_edge.spec
                    if test_dependency in current_spec[node.name]:
                        continue
                    current_spec[node.name].add_dependency_edge(
                        test_dependency.copy(), deptypes="test"
                        test_dependency.copy(), deptypes="test", virtuals=current_edge.virtuals
                    )

        results = [
@@ -2184,9 +2186,9 @@ def _read_lockfile_dict(self, d):
        # and add them to the spec
        for lockfile_key, node_dict in json_specs_by_hash.items():
            name, data = reader.name_and_data(node_dict)
            for _, dep_hash, deptypes, _ in reader.dependencies_from_node_dict(data):
            for _, dep_hash, deptypes, _, virtuals in reader.dependencies_from_node_dict(data):
                specs_by_hash[lockfile_key]._add_dependency(
                    specs_by_hash[dep_hash], deptypes=deptypes
                    specs_by_hash[dep_hash], deptypes=deptypes, virtuals=virtuals
                )

        # Traverse the root specs one at a time in the order they appear.
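A recurring change across these hunks (lockfile v5, DB v7, SpecfileV4) is threading a new virtuals tuple through every dependency edge. A self-contained stand-in for such an edge record, to show what the extra argument carries:

    from dataclasses import dataclass
    from typing import Tuple

    @dataclass(frozen=True)
    class Edge:
        parent: str
        child: str
        deptypes: Tuple[str, ...]
        virtuals: Tuple[str, ...] = ()  # e.g. ("mpi",) when the child is used as an mpi provider

    edge = Edge("hdf5", "openmpi", deptypes=("build", "link"), virtuals=("mpi",))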
@@ -42,6 +42,8 @@ def activate_header(env, shell, prompt=None):
        cmds += 'set "SPACK_ENV=%s"\n' % env.path
        # TODO: despacktivate
        # TODO: prompt
    elif shell == "pwsh":
        cmds += "$Env:SPACK_ENV=%s\n" % env.path
    else:
        if "color" in os.getenv("TERM", "") and prompt:
            prompt = colorize("@G{%s}" % prompt, color=True, enclose=True)
@@ -79,6 +81,8 @@ def deactivate_header(shell):
        cmds += 'set "SPACK_ENV="\n'
        # TODO: despacktivate
        # TODO: prompt
    elif shell == "pwsh":
        cmds += "Remove-Item Env:SPACK_ENV"
    else:
        cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n"
        cmds += "unset SPACK_ENV; export SPACK_ENV;\n"
@@ -544,6 +544,7 @@ def _static_edges(specs, deptype):
                spack.spec.Spec(parent_name),
                spack.spec.Spec(dependency_name),
                deptypes=deptype,
                virtuals=(),
            )
@@ -231,7 +231,9 @@ def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs):
        dep.concretize()
        # mark compiler as depended-on by the packages that use it
        for pkg in pkgs:
            dep._dependents.add(spack.spec.DependencySpec(pkg.spec, dep, deptypes=("build",)))
            dep._dependents.add(
                spack.spec.DependencySpec(pkg.spec, dep, deptypes=("build",), virtuals=())
            )
        packages = [(s.package, False) for s in dep.traverse(order="post", root=False)]

        packages.append((dep.package, True))
@@ -40,7 +40,7 @@

import llnl.util.filesystem
import llnl.util.tty as tty
from llnl.util.lang import dedupe
from llnl.util.lang import dedupe, memoized

import spack.build_environment
import spack.config
@@ -671,7 +671,14 @@ def configure_options(self):
        # the configure option section
        return None

    def modification_needs_formatting(self, modification):
        """Returns True if environment modification entry needs to be formatted."""
        return (
            not isinstance(modification, (spack.util.environment.SetEnv)) or not modification.raw
        )

    @tengine.context_property
    @memoized
    def environment_modifications(self):
        """List of environment modifications to be processed."""
        # Modifications guessed by inspecting the spec prefix
@@ -733,15 +740,29 @@ def environment_modifications(self):
                _check_tokens_are_valid(x.name, message=msg)
                # Transform them
                x.name = spec.format(x.name, transform=transform)
            try:
                # Not every command has a value
                x.value = spec.format(x.value)
            except AttributeError:
                pass
            if self.modification_needs_formatting(x):
                try:
                    # Not every command has a value
                    x.value = spec.format(x.value)
                except AttributeError:
                    pass
            x.name = str(x.name).replace("-", "_")

        return [(type(x).__name__, x) for x in env if x.name not in exclude]

    @tengine.context_property
    def has_manpath_modifications(self):
        """True if MANPATH environment variable is modified."""
        for modification_type, cmd in self.environment_modifications:
            if not isinstance(
                cmd, (spack.util.environment.PrependPath, spack.util.environment.AppendPath)
            ):
                continue
            if cmd.name == "MANPATH":
                return True
        else:
            return False

    @tengine.context_property
    def autoload(self):
        """List of modules that needs to be loaded automatically."""
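The new modification_needs_formatting guard skips spec.format() only for SetEnv entries explicitly marked raw. The guard in isolation, with a minimal SetEnv stand-in that mirrors the attribute the real class carries:

    class SetEnv:
        def __init__(self, name, value, raw=False):
            self.name, self.value, self.raw = name, value, raw

    def needs_formatting(modification):
        # Only a SetEnv explicitly marked raw is passed through untouched.
        return not isinstance(modification, SetEnv) or not modification.raw

    assert needs_formatting(SetEnv("FOO", "{name}"))                # gets formatted
    assert not needs_formatting(SetEnv("FOO", "{name}", raw=True))  # left as-is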
@@ -1231,6 +1231,7 @@ def dependencies_of_type(cls, *deptypes):
        if any(dt in cls.dependencies[name][cond].type for cond in conds for dt in deptypes)
    )

    # TODO: allow more than one active extendee.
    @property
    def extendee_spec(self):
        """
@@ -1246,7 +1247,6 @@ def extendee_spec(self):
            if dep.name in self.extendees:
                deps.append(dep)

        # TODO: allow more than one active extendee.
        if deps:
            assert len(deps) == 1
            return deps[0]
@@ -1256,7 +1256,6 @@ def extendee_spec(self):
        if self.spec._concrete:
            return None
        else:
            # TODO: do something sane here with more than one extendee
            # If it's not concrete, then return the spec from the
            # extends() directive since that is all we know so far.
            spec_str, kwargs = next(iter(self.extendees.items()))
@@ -291,7 +291,7 @@ def next_spec(
                 if root_spec.concrete:
                     raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))

-                root_spec._add_dependency(dependency, deptypes=())
+                root_spec._add_dependency(dependency, deptypes=(), virtuals=())

             else:
                 break
@@ -292,8 +292,8 @@ def from_json(stream, repository):
         index.providers = _transform(
             providers,
             lambda vpkg, plist: (
-                spack.spec.SpecfileV3.from_node_dict(vpkg),
-                set(spack.spec.SpecfileV3.from_node_dict(p) for p in plist),
+                spack.spec.SpecfileV4.from_node_dict(vpkg),
+                set(spack.spec.SpecfileV4.from_node_dict(p) for p in plist),
             ),
         )
         return index
@@ -676,7 +676,7 @@ def is_relocatable(spec):
     Raises:
         ValueError: if the spec is not installed
     """
-    if not spec.install_status():
+    if not spec.installed:
        raise ValueError("spec is not installed [{0}]".format(str(spec)))

    if spec.external or spec.virtual:
@@ -134,23 +134,6 @@
 core_shared_properties = union_dicts(
     {
         "pipeline-gen": pipeline_gen_schema,
-        "bootstrap": {
-            "type": "array",
-            "items": {
-                "anyOf": [
-                    {"type": "string"},
-                    {
-                        "type": "object",
-                        "additionalProperties": False,
-                        "required": ["name"],
-                        "properties": {
-                            "name": {"type": "string"},
-                            "compiler-agnostic": {"type": "boolean", "default": False},
-                        },
-                    },
-                ]
-            },
-        },
         "rebuild-index": {"type": "boolean"},
         "broken-specs-url": {"type": "string"},
         "broken-tests-packages": {"type": "array", "items": {"type": "string"}},
@@ -2500,10 +2500,15 @@ def depends_on(self, pkg, dep, type):
         assert len(dependencies) < 2, msg

         if not dependencies:
-            self._specs[pkg].add_dependency_edge(self._specs[dep], deptypes=(type,))
+            self._specs[pkg].add_dependency_edge(self._specs[dep], deptypes=(type,), virtuals=())
         else:
             # TODO: This assumes that each solve unifies dependencies
-            dependencies[0].add_type(type)
+            dependencies[0].update_deptypes(deptypes=(type,))
+
+    def virtual_on_edge(self, pkg, provider, virtual):
+        dependencies = self._specs[pkg].edges_to_dependencies(name=provider)
+        assert len(dependencies) == 1
+        dependencies[0].update_virtuals((virtual,))

     def reorder_flags(self):
         """Order compiler flags on specs in predefined order.
@@ -2581,6 +2586,8 @@ def sort_fn(function_tuple):
             return (-2, 0)
         elif name == "external_spec_selected":
             return (0, 0)  # note out of order so this goes last
+        elif name == "virtual_on_edge":
+            return (1, 0)
         else:
             return (-1, 0)
@@ -300,6 +300,11 @@ attr("depends_on", Package, Provider, Type)
     provider(Provider, Virtual),
     not external(Package).

+attr("virtual_on_edge", Package, Provider, Virtual)
+  :- dependency_holds(Package, Virtual, Type),
+     provider(Provider, Virtual),
+     not external(Package).
+
 % dependencies on virtuals also imply that the virtual is a virtual node
 attr("virtual_node", Virtual)
   :- dependency_holds(Package, Virtual, Type),
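The new ASP rule grounds one `virtual_on_edge(Package, Provider, Virtual)` fact per virtual dependency a provider satisfies, and the `SpecBuilder.virtual_on_edge` method shown earlier replays each fact onto the matching DAG edge. A toy model of that replay, with plain dicts standing in for `Spec` objects:

```python
# Facts as the solver would ground them: (package, provider, virtual).
facts = [("mpileaks", "openmpi", "mpi"), ("hdf5", "openmpi", "mpi")]

# One entry per DAG edge; the value collects the virtuals it carries.
edges = {("mpileaks", "openmpi"): set(), ("hdf5", "openmpi"): set()}


def virtual_on_edge(pkg, provider, virtual):
    # The real handler asserts exactly one matching edge exists, then
    # calls edge.update_virtuals((virtual,)).
    edges[(pkg, provider)].add(virtual)


for pkg, provider, virtual in facts:
    virtual_on_edge(pkg, provider, virtual)

print(edges)  # both edges now carry {'mpi'}
```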
@@ -50,6 +50,7 @@
 """
 import collections
 import collections.abc
+import enum
 import io
 import itertools
 import os
@@ -170,7 +171,17 @@
 )

 #: specfile format version. Must increase monotonically
-SPECFILE_FORMAT_VERSION = 3
+SPECFILE_FORMAT_VERSION = 4
+
+
+# InstallStatus is used to map install statuses to symbols for display
+# Options are artificially disjoint for display purposes
+class InstallStatus(enum.Enum):
+    installed = "@g{[+]} "
+    upstream = "@g{[^]} "
+    external = "@g{[e]} "
+    absent = "@K{ - } "
+    missing = "@r{[-]} "


 def colorize_spec(spec):
@@ -714,47 +725,81 @@ class DependencySpec:
         parent: starting node of the edge
         spec: ending node of the edge.
         deptypes: list of strings, representing dependency relationships.
+        virtuals: virtual packages provided from child to parent node.
     """

-    __slots__ = "parent", "spec", "deptypes"
+    __slots__ = "parent", "spec", "parameters"

-    def __init__(self, parent: "Spec", spec: "Spec", *, deptypes: dp.DependencyArgument):
+    def __init__(
+        self,
+        parent: "Spec",
+        spec: "Spec",
+        *,
+        deptypes: dp.DependencyArgument,
+        virtuals: Tuple[str, ...],
+    ):
         self.parent = parent
         self.spec = spec
-        self.deptypes = dp.canonical_deptype(deptypes)
+        self.parameters = {
+            "deptypes": dp.canonical_deptype(deptypes),
+            "virtuals": tuple(sorted(set(virtuals))),
+        }

-    def update_deptypes(self, deptypes: dp.DependencyArgument) -> bool:
-        deptypes = set(deptypes)
-        deptypes.update(self.deptypes)
-        deptypes = tuple(sorted(deptypes))
-        changed = self.deptypes != deptypes
+    @property
+    def deptypes(self) -> Tuple[str, ...]:
+        return self.parameters["deptypes"]

-        self.deptypes = deptypes
-        return changed
+    @property
+    def virtuals(self) -> Tuple[str, ...]:
+        return self.parameters["virtuals"]

+    def _update_edge_multivalued_property(
+        self, property_name: str, value: Tuple[str, ...]
+    ) -> bool:
+        current = self.parameters[property_name]
+        update = set(current) | set(value)
+        update = tuple(sorted(update))
+        changed = current != update
+
+        if not changed:
+            return False
+
+        self.parameters[property_name] = update
+        return True
+
+    def update_deptypes(self, deptypes: Tuple[str, ...]) -> bool:
+        """Update the current dependency types"""
+        return self._update_edge_multivalued_property("deptypes", deptypes)
+
+    def update_virtuals(self, virtuals: Tuple[str, ...]) -> bool:
+        """Update the list of provided virtuals"""
+        return self._update_edge_multivalued_property("virtuals", virtuals)
+
     def copy(self) -> "DependencySpec":
-        return DependencySpec(self.parent, self.spec, deptypes=self.deptypes)
-
-    def add_type(self, type: dp.DependencyArgument):
-        self.deptypes = dp.canonical_deptype(self.deptypes + dp.canonical_deptype(type))
+        """Return a copy of this edge"""
+        return DependencySpec(
+            self.parent, self.spec, deptypes=self.deptypes, virtuals=self.virtuals
+        )

     def _cmp_iter(self):
         yield self.parent.name if self.parent else None
         yield self.spec.name if self.spec else None
         yield self.deptypes
+        yield self.virtuals

     def __str__(self) -> str:
-        return "%s %s--> %s" % (
-            self.parent.name if self.parent else None,
-            self.deptypes,
-            self.spec.name if self.spec else None,
-        )
+        parent = self.parent.name if self.parent else None
+        child = self.spec.name if self.spec else None
+        return f"{parent} {self.deptypes}[virtuals={','.join(self.virtuals)}] --> {child}"

-    def canonical(self) -> Tuple[str, str, Tuple[str, ...]]:
-        return self.parent.dag_hash(), self.spec.dag_hash(), self.deptypes
+    def canonical(self) -> Tuple[str, str, Tuple[str, ...], Tuple[str, ...]]:
+        return self.parent.dag_hash(), self.spec.dag_hash(), self.deptypes, self.virtuals

     def flip(self) -> "DependencySpec":
-        return DependencySpec(parent=self.spec, spec=self.parent, deptypes=self.deptypes)
+        """Flip the dependency, and drop virtual information"""
+        return DependencySpec(
+            parent=self.spec, spec=self.parent, deptypes=self.deptypes, virtuals=()
+        )


 class CompilerFlag(str):
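The heart of this refactor is that `deptypes` and `virtuals` become two entries of a single `parameters` dict that share one sorted-set merge routine, so an update reports whether it actually changed anything. A condensed, self-contained model of just that mechanism (not the full class):

```python
from typing import Tuple


# Stand-alone model of the refactor above: both multivalued edge
# properties live in one "parameters" dict behind a common merge.
class Edge:
    def __init__(self, deptypes: Tuple[str, ...], virtuals: Tuple[str, ...]):
        self.parameters = {
            "deptypes": tuple(sorted(set(deptypes))),
            "virtuals": tuple(sorted(set(virtuals))),
        }

    def _update(self, key: str, value: Tuple[str, ...]) -> bool:
        # Union, canonicalize by sorting, report whether anything changed.
        merged = tuple(sorted(set(self.parameters[key]) | set(value)))
        changed = merged != self.parameters[key]
        if changed:
            self.parameters[key] = merged
        return changed


e = Edge(deptypes=("build",), virtuals=())
assert e._update("deptypes", ("link",)) is True   # ('build', 'link')
assert e._update("virtuals", ("mpi",)) is True    # ('mpi',)
assert e._update("virtuals", ("mpi",)) is False   # idempotent: no change
print(e.parameters)
```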
@@ -1575,10 +1620,12 @@ def _set_compiler(self, compiler):
             )
         self.compiler = compiler

-    def _add_dependency(self, spec: "Spec", *, deptypes: dp.DependencyArgument):
+    def _add_dependency(
+        self, spec: "Spec", *, deptypes: dp.DependencyArgument, virtuals: Tuple[str, ...]
+    ):
         """Called by the parser to add another spec as a dependency."""
         if spec.name not in self._dependencies or not spec.name:
-            self.add_dependency_edge(spec, deptypes=deptypes)
+            self.add_dependency_edge(spec, deptypes=deptypes, virtuals=virtuals)
             return

         # Keep the intersection of constraints when a dependency is added
@@ -1596,34 +1643,58 @@ def _add_dependency(self, spec: "Spec", *, deptypes: dp.DependencyArgument):
                 "Cannot depend on incompatible specs '%s' and '%s'" % (dspec.spec, spec)
             )

-    def add_dependency_edge(self, dependency_spec: "Spec", *, deptypes: dp.DependencyArgument):
+    def add_dependency_edge(
+        self,
+        dependency_spec: "Spec",
+        *,
+        deptypes: dp.DependencyArgument,
+        virtuals: Tuple[str, ...],
+    ):
         """Add a dependency edge to this spec.

         Args:
             dependency_spec: spec of the dependency
             deptypes: dependency types for this edge
+            virtuals: virtuals provided by this edge
         """
         deptypes = dp.canonical_deptype(deptypes)

         # Check if we need to update edges that are already present
         selected = self._dependencies.select(child=dependency_spec.name)
         for edge in selected:
+            has_errors, details = False, []
+            msg = f"cannot update the edge from {edge.parent.name} to {edge.spec.name}"
             if any(d in edge.deptypes for d in deptypes):
-                msg = (
-                    'cannot add a dependency on "{0.spec}" of {1} type '
-                    'when the "{0.parent}" has the edge {0!s} already'
-                )
-                raise spack.error.SpecError(msg.format(edge, deptypes))
+                has_errors = True
+                details.append(
+                    (
+                        f"{edge.parent.name} has already an edge matching any"
+                        f" of these types {str(deptypes)}"
+                    )
+                )
+
+            if any(v in edge.virtuals for v in virtuals):
+                has_errors = True
+                details.append(
+                    (
+                        f"{edge.parent.name} has already an edge matching any"
+                        f" of these virtuals {str(virtuals)}"
+                    )
+                )
+
+            if has_errors:
+                raise spack.error.SpecError(msg, "\n".join(details))

         for edge in selected:
             if id(dependency_spec) == id(edge.spec):
                 # If we are here, it means the edge object was previously added to
                 # both the parent and the child. When we update this object they'll
                 # both see the deptype modification.
-                edge.add_type(deptypes)
+                edge.update_deptypes(deptypes=deptypes)
+                edge.update_virtuals(virtuals=virtuals)
                 return

-        edge = DependencySpec(self, dependency_spec, deptypes=deptypes)
+        edge = DependencySpec(self, dependency_spec, deptypes=deptypes, virtuals=virtuals)
         self._dependencies.add(edge)
         dependency_spec._dependents.add(edge)
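`add_dependency_edge` now aggregates every conflict on an existing edge (deptype overlap and virtual overlap) into one error instead of failing on the first, which gives the user the full picture in a single message. The pattern, reduced to a standalone function with hypothetical inputs:

```python
# Minimal sketch of the error-aggregation pattern above: collect every
# conflicting detail for an edge update, then raise once with all of them.
class EdgeError(Exception):
    def __init__(self, msg, details=""):
        super().__init__(f"{msg}\n{details}" if details else msg)


def check_edge(existing_deptypes, existing_virtuals, deptypes, virtuals):
    details = []
    if any(d in existing_deptypes for d in deptypes):
        details.append(f"edge already matches one of these types {deptypes}")
    if any(v in existing_virtuals for v in virtuals):
        details.append(f"edge already matches one of these virtuals {virtuals}")
    if details:
        raise EdgeError("cannot update the edge", "\n".join(details))


check_edge(("build",), ("mpi",), ("link",), ())  # no overlap: passes
try:
    check_edge(("build",), ("mpi",), ("build",), ("mpi",))
except EdgeError as e:
    print(e)  # reports both the deptype and the virtual conflict at once
```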
@@ -1896,12 +1967,12 @@ def lookup_hash(self):
         for node in self.traverse(root=False):
             if node.abstract_hash:
                 new = node._lookup_hash()
-                spec._add_dependency(new, deptypes=())
+                spec._add_dependency(new, deptypes=(), virtuals=())

         # reattach nodes that were not otherwise satisfied by new dependencies
         for node in self.traverse(root=False):
             if not any(n._satisfies(node) for n in spec.traverse()):
-                spec._add_dependency(node.copy(), deptypes=())
+                spec._add_dependency(node.copy(), deptypes=(), virtuals=())

         return spec
@@ -2036,8 +2107,14 @@ def to_node_dict(self, hash=ht.dag_hash):
                 name_tuple = ("name", name)
                 for dspec in edges_for_name:
                     hash_tuple = (hash.name, dspec.spec._cached_hash(hash))
-                    type_tuple = ("type", sorted(str(s) for s in dspec.deptypes))
-                    deps_list.append(syaml.syaml_dict([name_tuple, hash_tuple, type_tuple]))
+                    parameters_tuple = (
+                        "parameters",
+                        syaml.syaml_dict(
+                            (key, dspec.parameters[key]) for key in sorted(dspec.parameters)
+                        ),
+                    )
+                    ordered_entries = [name_tuple, hash_tuple, parameters_tuple]
+                    deps_list.append(syaml.syaml_dict(ordered_entries))
             d["dependencies"] = deps_list

         # Name is included in case this is replacing a virtual.
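With this change a serialized dependency entry carries a `parameters` mapping instead of the flat `type` list, which is what forces the specfile version bump. Roughly, as Python literals (the hash value is a placeholder):

```python
# What one entry of deps_list looks like after the change (v4 shape):
entry = {
    "name": "zlib",
    "hash": "abcdef1234567890abcdef1234567890abcdef12",
    "parameters": {"deptypes": ["build", "link"], "virtuals": []},
}

# Versus the v3 shape it replaces:
old_entry = {
    "name": "zlib",
    "hash": "abcdef1234567890abcdef1234567890abcdef12",
    "type": ["build", "link"],
}
```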
@@ -2361,7 +2438,7 @@ def spec_and_dependency_types(s):
             dag_node, dependency_types = spec_and_dependency_types(s)

             dependency_spec = spec_builder({dag_node: s_dependencies})
-            spec._add_dependency(dependency_spec, deptypes=dependency_types)
+            spec._add_dependency(dependency_spec, deptypes=dependency_types, virtuals=())

         return spec

@@ -2379,8 +2456,10 @@ def from_dict(data):
         spec = SpecfileV1.load(data)
     elif int(data["spec"]["_meta"]["version"]) == 2:
         spec = SpecfileV2.load(data)
-    else:
+    elif int(data["spec"]["_meta"]["version"]) == 3:
         spec = SpecfileV3.load(data)
+    else:
+        spec = SpecfileV4.load(data)

     # Any git version should
     for s in spec.traverse():
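The dispatch above now pins version 3 explicitly and lets anything newer fall through to `SpecfileV4`. A condensed sketch with string stand-ins for the loader classes:

```python
# Condensed view of the reader dispatch (stand-in return values):
def load(data):
    version = int(data["spec"]["_meta"]["version"])
    if version == 1:
        return "SpecfileV1"
    elif version == 2:
        return "SpecfileV2"
    elif version == 3:
        return "SpecfileV3"
    return "SpecfileV4"  # the new default branch


assert load({"spec": {"_meta": {"version": 3}}}) == "SpecfileV3"
assert load({"spec": {"_meta": {"version": 4}}}) == "SpecfileV4"
```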
@@ -2529,6 +2608,7 @@ def _concretize_helper(self, concretizer, presets=None, visited=None):
     def _replace_with(self, concrete):
         """Replace this virtual spec with a concrete spec."""
         assert self.virtual
+        virtuals = (self.name,)
         for dep_spec in itertools.chain.from_iterable(self._dependents.values()):
             dependent = dep_spec.parent
             deptypes = dep_spec.deptypes
@@ -2539,7 +2619,11 @@ def _replace_with(self, concrete):

             # add the replacement, unless it is already a dep of dependent.
             if concrete.name not in dependent._dependencies:
-                dependent._add_dependency(concrete, deptypes=deptypes)
+                dependent._add_dependency(concrete, deptypes=deptypes, virtuals=virtuals)
+            else:
+                dependent.edges_to_dependencies(name=concrete.name)[0].update_virtuals(
+                    virtuals=virtuals
+                )

     def _expand_virtual_packages(self, concretizer):
         """Find virtual packages in this spec, replace them with providers,
@@ -3180,7 +3264,9 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, tests):

         # If it's a virtual dependency, try to find an existing
         # provider in the spec, and merge that.
+        virtuals = ()
         if spack.repo.path.is_virtual_safe(dep.name):
+            virtuals = (dep.name,)
             visited.add(dep.name)
             provider = self._find_provider(dep, provider_index)
             if provider:
@@ -3236,7 +3322,7 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, tests):
         # Add merged spec to my deps and recurse
         spec_dependency = spec_deps[dep.name]
         if dep.name not in self._dependencies:
-            self._add_dependency(spec_dependency, deptypes=dependency.type)
+            self._add_dependency(spec_dependency, deptypes=dependency.type, virtuals=virtuals)

         changed |= spec_dependency._normalize_helper(visited, spec_deps, provider_index, tests)
         return changed
@@ -3573,15 +3659,20 @@ def _constrain_dependencies(self, other):
             changed |= edges_from_name[0].update_deptypes(
                 other._dependencies[name][0].deptypes
             )
+            changed |= edges_from_name[0].update_virtuals(
+                other._dependencies[name][0].virtuals
+            )

         # Update with additional constraints from other spec
         # operate on direct dependencies only, because a concrete dep
         # represented by hash may have structure that needs to be preserved
         for name in other.direct_dep_difference(self):
             dep_spec_copy = other._get_dependency(name)
-            dep_copy = dep_spec_copy.spec
-            deptypes = dep_spec_copy.deptypes
-            self._add_dependency(dep_copy.copy(), deptypes=deptypes)
+            self._add_dependency(
+                dep_spec_copy.spec.copy(),
+                deptypes=dep_spec_copy.deptypes,
+                virtuals=dep_spec_copy.virtuals,
+            )
             changed = True

         return changed
@@ -3965,7 +4056,7 @@ def spid(spec):
                 new_specs[spid(edge.spec)] = edge.spec.copy(deps=False)

             new_specs[spid(edge.parent)].add_dependency_edge(
-                new_specs[spid(edge.spec)], deptypes=edge.deptypes
+                new_specs[spid(edge.spec)], deptypes=edge.deptypes, virtuals=edge.virtuals
             )

     def copy(self, deps=True, **kwargs):
@@ -4401,12 +4492,20 @@ def __str__(self):
     def install_status(self):
         """Helper for tree to print DB install status."""
         if not self.concrete:
-            return None
-        try:
-            record = spack.store.db.get_record(self)
-            return record.installed
-        except KeyError:
-            return None
+            return InstallStatus.absent
+
+        if self.external:
+            return InstallStatus.external
+
+        upstream, record = spack.store.db.query_by_spec_hash(self.dag_hash())
+        if not record:
+            return InstallStatus.absent
+        elif upstream and record.installed:
+            return InstallStatus.upstream
+        elif record.installed:
+            return InstallStatus.installed
+        else:
+            return InstallStatus.missing

     def _installed_explicitly(self):
         """Helper for tree to print DB install status."""
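`install_status()` now returns an `InstallStatus` member instead of `True`/`False`/`None`, so callers like `tree()` can render the right symbol directly from `status.value`. A standalone copy of the decision table (the record is a stand-in dict, not a real DB record):

```python
import enum


class InstallStatus(enum.Enum):
    installed = "@g{[+]} "
    upstream = "@g{[^]} "
    external = "@g{[e]} "
    absent = "@K{ - } "
    missing = "@r{[-]} "


def classify(concrete, external, record, upstream):
    # Same branch order as the method above: abstract specs and missing
    # records are "absent"; externals win over DB state.
    if not concrete:
        return InstallStatus.absent
    if external:
        return InstallStatus.external
    if not record:
        return InstallStatus.absent
    if upstream and record["installed"]:
        return InstallStatus.upstream
    return InstallStatus.installed if record["installed"] else InstallStatus.missing


print(classify(True, False, {"installed": True}, upstream=False).value)  # "@g{[+]} "
print(classify(True, False, {"installed": False}, upstream=False).value)  # "@r{[-]} "
```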
@@ -4420,7 +4519,10 @@ def _installed_explicitly(self):

     def tree(self, **kwargs):
         """Prints out this spec and its dependencies, tree-formatted
-        with indentation."""
+        with indentation.
+
+        Status function may either output a boolean or an InstallStatus
+        """
         color = kwargs.pop("color", clr.get_color_when())
         depth = kwargs.pop("depth", False)
         hashes = kwargs.pop("hashes", False)
@@ -4452,14 +4554,12 @@ def tree(self, **kwargs):

             if status_fn:
                 status = status_fn(node)
-                if node.installed_upstream:
-                    out += clr.colorize("@g{[^]} ", color=color)
-                elif status is None:
-                    out += clr.colorize("@K{ - } ", color=color)  # !installed
+                if status in list(InstallStatus):
+                    out += clr.colorize(status.value, color=color)
                 elif status:
-                    out += clr.colorize("@g{[+]} ", color=color)  # installed
+                    out += clr.colorize("@g{[+]} ", color=color)
                 else:
-                    out += clr.colorize("@r{[-]} ", color=color)  # missing
+                    out += clr.colorize("@r{[-]} ", color=color)

             if hashes:
                 out += clr.colorize("@K{%s} ", color=color) % node.dag_hash(hlen)
@@ -4635,12 +4735,16 @@ def from_self(name, transitive):
             if name in self_nodes:
                 for edge in self[name].edges_to_dependencies():
                     dep_name = deps_to_replace.get(edge.spec, edge.spec).name
-                    nodes[name].add_dependency_edge(nodes[dep_name], deptypes=edge.deptypes)
+                    nodes[name].add_dependency_edge(
+                        nodes[dep_name], deptypes=edge.deptypes, virtuals=edge.virtuals
+                    )
                 if any(dep not in self_nodes for dep in self[name]._dependencies):
                     nodes[name].build_spec = self[name].build_spec
             else:
                 for edge in other[name].edges_to_dependencies():
-                    nodes[name].add_dependency_edge(nodes[edge.spec.name], deptypes=edge.deptypes)
+                    nodes[name].add_dependency_edge(
+                        nodes[edge.spec.name], deptypes=edge.deptypes, virtuals=edge.virtuals
+                    )
                 if any(dep not in other_nodes for dep in other[name]._dependencies):
                     nodes[name].build_spec = other[name].build_spec
@@ -4730,11 +4834,40 @@ def merge_abstract_anonymous_specs(*abstract_specs: Spec):
         # Update with additional constraints from other spec
         for name in current_spec_constraint.direct_dep_difference(merged_spec):
             edge = next(iter(current_spec_constraint.edges_to_dependencies(name)))
-            merged_spec._add_dependency(edge.spec.copy(), deptypes=edge.deptypes)
+            merged_spec._add_dependency(
+                edge.spec.copy(), deptypes=edge.deptypes, virtuals=edge.virtuals
+            )

     return merged_spec


+def reconstruct_virtuals_on_edges(spec):
+    """Reconstruct virtuals on edges. Used to read from old DB and reindex.
+
+    Args:
+        spec: spec on which we want to reconstruct virtuals
+    """
+    # Collect all possible virtuals
+    possible_virtuals = set()
+    for node in spec.traverse():
+        try:
+            possible_virtuals.update({x for x in node.package.dependencies if Spec(x).virtual})
+        except Exception as e:
+            warnings.warn(f"cannot reconstruct virtual dependencies on package {node.name}: {e}")
+            continue
+
+    # Assume all incoming edges to provider are marked with virtuals=
+    for vspec in possible_virtuals:
+        try:
+            provider = spec[vspec]
+        except KeyError:
+            # Virtual not in the DAG
+            continue
+
+        for edge in provider.edges_from_dependents():
+            edge.update_virtuals([vspec])
+
+
 class SpecfileReaderBase:
     @classmethod
     def from_node_dict(cls, node):
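Old databases and specfiles carry no per-edge virtuals, so `reconstruct_virtuals_on_edges` back-fills them on load: find each virtual's provider in the DAG, then mark every edge pointing at that provider. The idea in miniature, with plain tuples standing in for the DAG:

```python
# Sketch of the reconstruction heuristic (stand-ins for Spec/DAG objects):
# given which node provides a virtual, mark all edges into that provider.
dag_edges = [
    ("mpileaks", "openmpi", set()),   # (parent, child, virtuals on edge)
    ("mpileaks", "callpath", set()),
]
providers = {"mpi": "openmpi"}  # virtual name -> provider node


for virtual, provider in providers.items():
    for parent, child, virtuals in dag_edges:
        if child == provider:
            virtuals.add(virtual)  # mirrors edge.update_virtuals([vspec])

print(dag_edges)  # only the edge into openmpi gains {'mpi'}
```

This is a heuristic by design: as the diff's comment says, it assumes all incoming edges to a provider carry the virtual, which is the best an old-format file can support.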
@@ -4818,7 +4951,7 @@ def _load(cls, data):

         # Pass 0: Determine hash type
         for node in nodes:
-            for _, _, _, dhash_type in cls.dependencies_from_node_dict(node):
+            for _, _, _, dhash_type, _ in cls.dependencies_from_node_dict(node):
                 any_deps = True
                 if dhash_type:
                     hash_type = dhash_type
@@ -4849,8 +4982,10 @@ def _load(cls, data):
         # Pass 2: Finish construction of all DAG edges (including build specs)
         for node_hash, node in hash_dict.items():
             node_spec = node["node_spec"]
-            for _, dhash, dtypes, _ in cls.dependencies_from_node_dict(node):
-                node_spec._add_dependency(hash_dict[dhash]["node_spec"], deptypes=dtypes)
+            for _, dhash, dtypes, _, virtuals in cls.dependencies_from_node_dict(node):
+                node_spec._add_dependency(
+                    hash_dict[dhash]["node_spec"], deptypes=dtypes, virtuals=virtuals
+                )
             if "build_spec" in node.keys():
                 _, bhash, _ = cls.build_spec_from_node_dict(node, hash_type=hash_type)
                 node_spec._build_spec = hash_dict[bhash]["node_spec"]
@@ -4884,9 +5019,10 @@ def load(cls, data):
         for node in nodes:
             # get dependency dict from the node.
             name, data = cls.name_and_data(node)
-            for dname, _, dtypes, _ in cls.dependencies_from_node_dict(data):
-                deps[name]._add_dependency(deps[dname], deptypes=dtypes)
+            for dname, _, dtypes, _, virtuals in cls.dependencies_from_node_dict(data):
+                deps[name]._add_dependency(deps[dname], deptypes=dtypes, virtuals=virtuals)

+        reconstruct_virtuals_on_edges(result)
         return result

     @classmethod
@@ -4915,18 +5051,20 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
                 if h.name in elt:
                     dep_hash, deptypes = elt[h.name], elt["type"]
                     hash_type = h.name
+                    virtuals = []
                     break
             else:  # We never determined a hash type...
                 raise spack.error.SpecError("Couldn't parse dependency spec.")
         else:
             raise spack.error.SpecError("Couldn't parse dependency types in spec.")
-        yield dep_name, dep_hash, list(deptypes), hash_type
+        yield dep_name, dep_hash, list(deptypes), hash_type, list(virtuals)


 class SpecfileV2(SpecfileReaderBase):
     @classmethod
     def load(cls, data):
         result = cls._load(data)
+        reconstruct_virtuals_on_edges(result)
         return result

     @classmethod
@@ -4960,7 +5098,7 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
                 raise spack.error.SpecError("Couldn't parse dependency spec.")
         else:
             raise spack.error.SpecError("Couldn't parse dependency types in spec.")
-        result.append((dep_name, dep_hash, list(deptypes), hash_type))
+        result.append((dep_name, dep_hash, list(deptypes), hash_type, list(virtuals)))
     return result

     @classmethod
@@ -4980,6 +5118,20 @@ class SpecfileV3(SpecfileV2):
     pass


+class SpecfileV4(SpecfileV2):
+    @classmethod
+    def extract_info_from_dep(cls, elt, hash):
+        dep_hash = elt[hash.name]
+        deptypes = elt["parameters"]["deptypes"]
+        hash_type = hash.name
+        virtuals = elt["parameters"]["virtuals"]
+        return dep_hash, deptypes, hash_type, virtuals
+
+    @classmethod
+    def load(cls, data):
+        return cls._load(data)
+
+
 class LazySpecCache(collections.defaultdict):
     """Cache for Specs that uses a spec_like as key, and computes lazily
     the corresponding value ``Spec(spec_like)``.
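Reading the v4 entry shape shown earlier is then a matter of indexing into `parameters`; a sketch with a plain string key in place of the hash-type object:

```python
# Stand-in for SpecfileV4.extract_info_from_dep, operating on the v4
# entry shape; "hash" replaces the real hash-type object's .name.
def extract_info_from_dep(elt, hash_name="hash"):
    dep_hash = elt[hash_name]
    deptypes = elt["parameters"]["deptypes"]
    virtuals = elt["parameters"]["virtuals"]
    return dep_hash, deptypes, hash_name, virtuals


entry = {
    "name": "zlib",
    "hash": "abcdef1234567890abcdef1234567890abcdef12",
    "parameters": {"deptypes": ["build", "link"], "virtuals": []},
}
print(extract_info_from_dep(entry))
```

Note that `SpecfileV4.load` skips `reconstruct_virtuals_on_edges`: the v4 format stores virtuals explicitly, so only the v1 through v3 readers need the heuristic.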
@@ -201,12 +201,12 @@ def test_default_rpaths_create_install_default_layout(mirror_dir):
     install_cmd("--no-cache", sy_spec.name)

     # Create a buildcache
-    buildcache_cmd("push", "-au", "-d", mirror_dir, cspec.name, sy_spec.name)
+    buildcache_cmd("push", "-au", mirror_dir, cspec.name, sy_spec.name)
     # Test force overwrite create buildcache (-f option)
-    buildcache_cmd("push", "-auf", "-d", mirror_dir, cspec.name)
+    buildcache_cmd("push", "-auf", mirror_dir, cspec.name)

     # Create mirror index
-    buildcache_cmd("update-index", "-d", mirror_dir)
+    buildcache_cmd("update-index", mirror_dir)
     # List the buildcaches in the mirror
     buildcache_cmd("list", "-alv")

@@ -214,13 +214,13 @@ def test_default_rpaths_create_install_default_layout(mirror_dir):
     uninstall_cmd("-y", "--dependents", gspec.name)

     # Test installing from build caches
-    buildcache_cmd("install", "-au", cspec.name, sy_spec.name)
+    buildcache_cmd("install", "-u", cspec.name, sy_spec.name)

     # This gives warning that spec is already installed
-    buildcache_cmd("install", "-au", cspec.name)
+    buildcache_cmd("install", "-u", cspec.name)

     # Test overwrite install
-    buildcache_cmd("install", "-afu", cspec.name)
+    buildcache_cmd("install", "-fu", cspec.name)

     buildcache_cmd("keys", "-f")
     buildcache_cmd("list")
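These test updates track a CLI change visible throughout the rest of this diff: the mirror moved from the `-d`/`--mirror-url` flags to a positional argument on `buildcache push`, `install`, and `update-index`, and the `-a` flag was dropped from `install`. A runnable stand-in for the `SpackCommand` callable the tests use (paths are placeholders):

```python
def buildcache_cmd(*args):
    # Stand-in for spack.main.SpackCommand("buildcache"); just echoes
    # the equivalent command line instead of running it.
    print("spack buildcache " + " ".join(args))


# Old style (flag):        spack buildcache push -au -d /tmp/mirror corge
# New style (positional):  spack buildcache push -au /tmp/mirror corge
buildcache_cmd("push", "-au", "/tmp/mirror", "corge")
buildcache_cmd("update-index", "/tmp/mirror")
```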
@@ -246,35 +246,10 @@ def test_default_rpaths_install_nondefault_layout(mirror_dir):

     # Install some packages with dependent packages
     # test install in non-default install path scheme
-    buildcache_cmd("install", "-au", cspec.name, sy_spec.name)
+    buildcache_cmd("install", "-u", cspec.name, sy_spec.name)

     # Test force install in non-default install path scheme
-    buildcache_cmd("install", "-auf", cspec.name)
-
-
-@pytest.mark.requires_executables(*args)
-@pytest.mark.maybeslow
-@pytest.mark.nomockstage
-@pytest.mark.usefixtures("default_config", "cache_directory", "install_dir_default_layout")
-def test_relative_rpaths_create_default_layout(mirror_dir):
-    """
-    Test the creation and installation of buildcaches with relative
-    rpaths into the default directory layout scheme.
-    """
-
-    gspec, cspec = Spec("garply").concretized(), Spec("corge").concretized()
-
-    # Install 'corge' without using a cache
-    install_cmd("--no-cache", cspec.name)
-
-    # Create build cache with relative rpaths
-    buildcache_cmd("push", "-aur", "-d", mirror_dir, cspec.name)
-
-    # Create mirror index
-    buildcache_cmd("update-index", "-d", mirror_dir)
-
-    # Uninstall the package and deps
-    uninstall_cmd("-y", "--dependents", gspec.name)
+    buildcache_cmd("install", "-uf", cspec.name)


 @pytest.mark.requires_executables(*args)
@@ -291,19 +266,19 @@ def test_relative_rpaths_install_default_layout(mirror_dir):
     gspec, cspec = Spec("garply").concretized(), Spec("corge").concretized()

     # Install buildcache created with relativized rpaths
-    buildcache_cmd("install", "-auf", cspec.name)
+    buildcache_cmd("install", "-uf", cspec.name)

     # This gives warning that spec is already installed
-    buildcache_cmd("install", "-auf", cspec.name)
+    buildcache_cmd("install", "-uf", cspec.name)

     # Uninstall the package and deps
     uninstall_cmd("-y", "--dependents", gspec.name)

     # Install build cache
-    buildcache_cmd("install", "-auf", cspec.name)
+    buildcache_cmd("install", "-uf", cspec.name)

     # Test overwrite install
-    buildcache_cmd("install", "-auf", cspec.name)
+    buildcache_cmd("install", "-uf", cspec.name)


 @pytest.mark.requires_executables(*args)
@@ -320,7 +295,7 @@ def test_relative_rpaths_install_nondefault(mirror_dir):
     cspec = Spec("corge").concretized()

     # Test install in non-default install path scheme and relative path
-    buildcache_cmd("install", "-auf", cspec.name)
+    buildcache_cmd("install", "-uf", cspec.name)


 def test_push_and_fetch_keys(mock_gnupghome):
@@ -401,7 +376,7 @@ def test_spec_needs_rebuild(monkeypatch, tmpdir):
     install_cmd(s.name)

     # Put installed package in the buildcache
-    buildcache_cmd("push", "-u", "-a", "-d", mirror_dir.strpath, s.name)
+    buildcache_cmd("push", "-u", "-a", mirror_dir.strpath, s.name)

     rebuild = bindist.needs_rebuild(s, mirror_url)

@@ -430,8 +405,8 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
     install_cmd("--no-cache", s.name)

     # Create a buildcache and update index
-    buildcache_cmd("push", "-uad", mirror_dir.strpath, s.name)
-    buildcache_cmd("update-index", "-d", mirror_dir.strpath)
+    buildcache_cmd("push", "-ua", mirror_dir.strpath, s.name)
+    buildcache_cmd("update-index", mirror_dir.strpath)

     # Check package and dependency in buildcache
     cache_list = buildcache_cmd("list", "--allarch")
@@ -443,7 +418,7 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
     os.remove(*libelf_files)

     # Update index
-    buildcache_cmd("update-index", "-d", mirror_dir.strpath)
+    buildcache_cmd("update-index", mirror_dir.strpath)

     with spack.config.override("config:binary_index_ttl", 0):
         # Check dependency not in buildcache
@@ -519,10 +494,10 @@ def test_update_sbang(tmpdir, test_mirror):
     install_cmd("--no-cache", old_spec.name)

     # Create a buildcache with the installed spec.
-    buildcache_cmd("push", "-u", "-a", "-d", mirror_dir, old_spec_hash_str)
+    buildcache_cmd("push", "-u", "-a", mirror_dir, old_spec_hash_str)

     # Need to force an update of the buildcache index
-    buildcache_cmd("update-index", "-d", mirror_dir)
+    buildcache_cmd("update-index", mirror_dir)

     # Uninstall the original package.
     uninstall_cmd("-y", old_spec_hash_str)
@@ -538,7 +513,7 @@ def test_update_sbang(tmpdir, test_mirror):
     assert new_spec.dag_hash() == old_spec.dag_hash()

     # Install package from buildcache
-    buildcache_cmd("install", "-a", "-u", "-f", new_spec.name)
+    buildcache_cmd("install", "-u", "-f", new_spec.name)

     # Continue blowing away caches
     bindist.clear_spec_cache()
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import sys
-
 import pytest

 import spack.cmd.create
@@ -12,8 +10,6 @@
 import spack.util.executable
 import spack.util.url as url_util

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-

 @pytest.fixture(
     scope="function",
@@ -173,7 +173,7 @@ def wrapper_environment(working_env):
         SPACK_DTAGS_TO_ADD="--disable-new-dtags",
         SPACK_DTAGS_TO_STRIP="--enable-new-dtags",
         SPACK_COMPILER_FLAGS_KEEP="",
-        SPACK_COMPILER_FLAGS_REPLACE="-Werror*",
+        SPACK_COMPILER_FLAGS_REPLACE="-Werror*|",
     ):
         yield
@@ -278,8 +278,8 @@ def test_ld_flags(wrapper_environment, wrapper_flags):
        ld,
        test_args,
        ["ld"]
-       + spack_ldflags
        + test_include_paths
+       + [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
        + test_library_paths
        + ["--disable-new-dtags"]
        + test_rpaths
@@ -293,10 +293,10 @@ def test_cpp_flags(wrapper_environment, wrapper_flags):
        cpp,
        test_args,
        ["cpp"]
-       + spack_cppflags
        + test_include_paths
        + test_library_paths
-       + test_args_without_paths,
+       + test_args_without_paths
+       + spack_cppflags,
    )


@@ -306,10 +306,14 @@ def test_cc_flags(wrapper_environment, wrapper_flags):
        test_args,
        [real_cc]
        + target_args
        + test_include_paths
        + [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
        + test_library_paths
        + ["-Wl,--disable-new-dtags"]
        + test_wl_rpaths
        + test_args_without_paths
        + spack_cppflags
        + spack_cflags
        + spack_ldflags
        + common_compile_args
        + spack_ldlibs,
    )

@@ -320,10 +324,13 @@ def test_cxx_flags(wrapper_environment, wrapper_flags):
        test_args,
        [real_cc]
        + target_args
        + test_include_paths
        + [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
        + test_library_paths
        + ["-Wl,--disable-new-dtags"]
        + test_wl_rpaths
        + test_args_without_paths
        + spack_cppflags
        + spack_cxxflags
        + spack_ldflags
        + common_compile_args
        + spack_ldlibs,
    )

@@ -334,10 +341,14 @@ def test_fc_flags(wrapper_environment, wrapper_flags):
        test_args,
        [real_cc]
        + target_args
        + test_include_paths
        + [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
        + test_library_paths
        + ["-Wl,--disable-new-dtags"]
        + test_wl_rpaths
        + test_args_without_paths
        + spack_fflags
        + spack_cppflags
        + spack_ldflags
        + common_compile_args
        + spack_ldlibs,
    )
@@ -46,31 +46,6 @@ def test_import_signing_key(mock_gnupghome):
     ci.import_signing_key(signing_key)


-def test_configure_compilers(mutable_config):
-    def assert_missing(config):
-        assert (
-            "install_missing_compilers" not in config
-            or config["install_missing_compilers"] is False
-        )
-
-    def assert_present(config):
-        assert (
-            "install_missing_compilers" in config and config["install_missing_compilers"] is True
-        )
-
-    original_config = spack.config.get("config")
-    assert_missing(original_config)
-
-    ci.configure_compilers("FIND_ANY", scope="site")
-
-    second_config = spack.config.get("config")
-    assert_missing(second_config)
-
-    ci.configure_compilers("INSTALL_MISSING")
-    last_config = spack.config.get("config")
-    assert_present(last_config)
-
-
 class FakeWebResponder(object):
     def __init__(self, response_code=200, content_to_read=[]):
         self._resp_code = response_code
@@ -248,7 +223,7 @@ def test_ci_workarounds():
     fake_root_spec = "x" * 544
     fake_spack_ref = "x" * 40

-    common_variables = {"SPACK_COMPILER_ACTION": "NONE", "SPACK_IS_PR_PIPELINE": "False"}
+    common_variables = {"SPACK_IS_PR_PIPELINE": "False"}

     common_before_script = [
         'git clone "https://github.com/spack/spack"',
@@ -291,7 +266,7 @@ def make_build_job(name, deps, stage, use_artifact_buildcache, optimize, use_dependencies):
 def make_rebuild_index_job(use_artifact_buildcache, optimize, use_dependencies):
     result = {
         "stage": "stage-rebuild-index",
-        "script": "spack buildcache update-index --mirror-url s3://mirror",
+        "script": "spack buildcache update-index s3://mirror",
         "tags": ["tag-0", "tag-1"],
         "image": {"name": "spack/centos7", "entrypoint": [""]},
         "after_script": ['rm -rf "./spack"'],
@@ -35,12 +35,15 @@ def test_build_env_requires_a_spec(args):
 _out_file = "env.out"


+@pytest.mark.parametrize("shell", ["pwsh", "bat"] if sys.platform == "win32" else ["bash"])
 @pytest.mark.usefixtures("config", "mock_packages", "working_env")
-def test_dump(tmpdir):
+def test_dump(shell_as, shell, tmpdir):
     with tmpdir.as_cwd():
         build_env("--dump", _out_file, "zlib")
         with open(_out_file) as f:
-            if sys.platform == "win32":
+            if shell == "pwsh":
                 assert any(line.startswith("$Env:PATH") for line in f.readlines())
+            elif shell == "bat":
+                assert any(line.startswith('set "PATH=') for line in f.readlines())
             else:
                 assert any(line.startswith("PATH=") for line in f.readlines())
@@ -85,7 +85,7 @@ def tests_buildcache_create(install_mockery, mock_fetch, monkeypatch, tmpdir):
     pkg = "trivial-install-test-package"
     install(pkg)

-    buildcache("push", "-d", str(tmpdir), "--unsigned", pkg)
+    buildcache("push", "--unsigned", str(tmpdir), pkg)

     spec = Spec(pkg).concretized()
     tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
@@ -105,7 +105,7 @@ def tests_buildcache_create_env(
         add(pkg)
         install()

-        buildcache("push", "-d", str(tmpdir), "--unsigned")
+        buildcache("push", "--unsigned", str(tmpdir))

     spec = Spec(pkg).concretized()
     tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
@@ -118,7 +118,7 @@ def test_buildcache_create_fails_on_noargs(tmpdir):
     """Ensure that buildcache create fails when given no args or
     environment."""
     with pytest.raises(spack.main.SpackCommandError):
-        buildcache("push", "-d", str(tmpdir), "--unsigned")
+        buildcache("push", "--unsigned", str(tmpdir))


 def test_buildcache_create_fail_on_perm_denied(install_mockery, mock_fetch, monkeypatch, tmpdir):
@@ -127,7 +127,7 @@ def test_buildcache_create_fail_on_perm_denied(install_mockery, mock_fetch, monkeypatch, tmpdir):

     tmpdir.chmod(0)
     with pytest.raises(OSError) as error:
-        buildcache("push", "-d", str(tmpdir), "--unsigned", "trivial-install-test-package")
+        buildcache("push", "--unsigned", str(tmpdir), "trivial-install-test-package")
     assert error.value.errno == errno.EACCES
     tmpdir.chmod(0o700)
@@ -159,11 +159,11 @@ def test_update_key_index(
     # Put installed package in the buildcache, which, because we're signing
     # it, should result in the public key getting pushed to the buildcache
     # as well.
-    buildcache("push", "-a", "-d", mirror_dir.strpath, s.name)
+    buildcache("push", "-a", mirror_dir.strpath, s.name)

     # Now make sure that when we pass the "--keys" argument to update-index
     # it causes the index to get updated.
-    buildcache("update-index", "--keys", "-d", mirror_dir.strpath)
+    buildcache("update-index", "--keys", mirror_dir.strpath)

     key_dir_list = os.listdir(os.path.join(mirror_dir.strpath, "build_cache", "_pgp"))
@@ -213,27 +213,25 @@ def verify_mirror_contents():
     # Install a package and put it in the buildcache
     s = Spec(out_env_pkg).concretized()
     install(s.name)
-    buildcache("push", "-u", "-f", "-a", "--mirror-url", src_mirror_url, s.name)
+    buildcache("push", "-u", "-f", "-a", src_mirror_url, s.name)

     env("create", "test")
     with ev.read("test"):
         add(in_env_pkg)
         install()
-        buildcache("push", "-u", "-f", "-a", "--mirror-url", src_mirror_url, in_env_pkg)
+        buildcache("push", "-u", "-f", "-a", src_mirror_url, in_env_pkg)

     # Now run the spack buildcache sync command with all the various options
     # for specifying mirrors

     # Use urls to specify mirrors
-    buildcache(
-        "sync", "--src-mirror-url", src_mirror_url, "--dest-mirror-url", dest_mirror_url
-    )
+    buildcache("sync", src_mirror_url, dest_mirror_url)

     verify_mirror_contents()
     shutil.rmtree(dest_mirror_dir)

     # Use local directory paths to specify fs locations
-    buildcache("sync", "--src-directory", src_mirror_dir, "--dest-directory", dest_mirror_dir)
+    buildcache("sync", src_mirror_dir, dest_mirror_dir)

     verify_mirror_contents()
     shutil.rmtree(dest_mirror_dir)
@@ -242,7 +240,7 @@ def verify_mirror_contents():
     mirror("add", "src", src_mirror_url)
     mirror("add", "dest", dest_mirror_url)

-    buildcache("sync", "--src-mirror-name", "src", "--dest-mirror-name", "dest")
+    buildcache("sync", "src", "dest")

     verify_mirror_contents()
@@ -260,7 +258,7 @@ def test_buildcache_create_install(
     pkg = "trivial-install-test-package"
     install(pkg)

-    buildcache("push", "-d", str(tmpdir), "--unsigned", pkg)
+    buildcache("push", "--unsigned", str(tmpdir), pkg)

     spec = Spec(pkg).concretized()
     tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
@@ -324,12 +322,12 @@ def fake_push(node, push_url, options):

     monkeypatch.setattr(spack.binary_distribution, "push_or_raise", fake_push)

-    buildcache_create_args = ["create", "-d", str(tmpdir), "--unsigned"]
+    buildcache_create_args = ["create", "--unsigned"]

     if things_to_install != "":
         buildcache_create_args.extend(["--only", things_to_install])

-    buildcache_create_args.extend([slash_hash])
+    buildcache_create_args.extend([str(tmpdir), slash_hash])

     buildcache(*buildcache_create_args)
@@ -17,7 +17,6 @@
 import spack
 import spack.binary_distribution
 import spack.ci as ci
-import spack.compilers as compilers
 import spack.config
 import spack.environment as ev
 import spack.hash_types as ht
@@ -30,7 +29,7 @@
 from spack.schema.buildcache_spec import schema as specfile_schema
 from spack.schema.ci import schema as ci_schema
 from spack.schema.database_index import schema as db_idx_schema
-from spack.spec import CompilerSpec, Spec
+from spack.spec import Spec
 from spack.util.pattern import Bunch

 config_cmd = spack.main.SpackCommand("config")
@@ -163,8 +162,6 @@ def test_ci_generate_with_env(
         """\
 spack:
   definitions:
-    - bootstrap:
-      - cmake@3.4.3
     - old-gcc-pkgs:
       - archive-files
       - callpath
@@ -179,9 +176,6 @@ def test_ci_generate_with_env(
   mirrors:
     some-mirror: {0}
   ci:
-    bootstrap:
-      - name: bootstrap
-        compiler-agnostic: true
     pipeline-gen:
     - submapping:
       - match:
@@ -221,16 +215,10 @@ def test_ci_generate_with_env(
         with open(outputfile) as f:
             contents = f.read()
             yaml_contents = syaml.load(contents)
-            found_spec = False
-            for ci_key in yaml_contents.keys():
-                if "(bootstrap)" in ci_key:
-                    found_spec = True
-                    assert "cmake" in ci_key
-            assert found_spec
             assert "stages" in yaml_contents
-            assert len(yaml_contents["stages"]) == 6
+            assert len(yaml_contents["stages"]) == 5
             assert yaml_contents["stages"][0] == "stage-0"
-            assert yaml_contents["stages"][5] == "stage-rebuild-index"
+            assert yaml_contents["stages"][4] == "stage-rebuild-index"

             assert "rebuild-index" in yaml_contents
             rebuild_job = yaml_contents["rebuild-index"]
@@ -244,155 +232,6 @@ def test_ci_generate_with_env(
     assert artifacts_root == "jobs_scratch_dir"


-def _validate_needs_graph(yaml_contents, needs_graph, artifacts):
-    """Validate the needs graph in the generate CI"""
-
-    # TODO: Fix the logic to catch errors where expected packages/needs are not
-    # found.
-    for job_name, job_def in yaml_contents.items():
-        for needs_def_name, needs_list in needs_graph.items():
-            if job_name.startswith(needs_def_name):
-                # check job needs against the expected needs definition
-                j_needs = job_def["needs"]
-                assert all(
-                    [
-                        job_needs["job"][: job_needs["job"].index("/")] in needs_list
-                        for job_needs in j_needs
-                    ]
-                )
-                assert all(
-                    [nl in [n["job"][: n["job"].index("/")] for n in j_needs] for nl in needs_list]
-                )
-                assert all([job_needs["artifacts"] == artifacts for job_needs in j_needs])
-                break
-
-
-def test_ci_generate_bootstrap_gcc(
-    tmpdir, working_env, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment
-):
-    """Test that we can bootstrap a compiler and use it as the
-    compiler for a spec in the environment"""
-    filename = str(tmpdir.join("spack.yaml"))
-    with open(filename, "w") as f:
-        f.write(
-            """\
-spack:
-  definitions:
-    - bootstrap:
-      - gcc@3.0
-  specs:
-    - dyninst%gcc@=3.0
-  mirrors:
-    some-mirror: https://my.fake.mirror
-  ci:
-    bootstrap:
-      - name: bootstrap
-        compiler-agnostic: true
-    pipeline-gen:
-    - submapping:
-      - match:
-        - arch=test-debian6-x86_64
-        build-job:
-          tags:
-            - donotcare
-      - match:
-        - arch=test-debian6-aarch64
-        build-job:
-          tags:
-            - donotcare
-    - any-job:
-        tags:
-        - donotcare
-"""
-        )
-
-    needs_graph = {
-        "(bootstrap) conflict": [],
-        "(bootstrap) gcc": ["(bootstrap) conflict"],
-        "(specs) libelf": ["(bootstrap) gcc"],
-        "(specs) libdwarf": ["(bootstrap) gcc", "(specs) libelf"],
-        "(specs) dyninst": ["(bootstrap) gcc", "(specs) libelf", "(specs) libdwarf"],
-    }
-
-    with tmpdir.as_cwd():
-        env_cmd("create", "test", "./spack.yaml")
-        outputfile = str(tmpdir.join(".gitlab-ci.yml"))
-
-        with ev.read("test"):
-            ci_cmd("generate", "--output-file", outputfile)
-
-        with open(outputfile) as f:
-            contents = f.read()
-            yaml_contents = syaml.load(contents)
-            _validate_needs_graph(yaml_contents, needs_graph, False)
-
-
-def test_ci_generate_bootstrap_artifacts_buildcache(
-    tmpdir, working_env, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment
-):
-    """Test that we can bootstrap a compiler when artifacts buildcache
-    is turned on"""
-    filename = str(tmpdir.join("spack.yaml"))
-    with open(filename, "w") as f:
-        f.write(
-            """\
-spack:
-  definitions:
-    - bootstrap:
-      - gcc@3.0
-  specs:
-    - dyninst%gcc@=3.0
-  mirrors:
-    some-mirror: https://my.fake.mirror
-  ci:
-    bootstrap:
-      - name: bootstrap
-        compiler-agnostic: true
-    pipeline-gen:
-    - submapping:
-      - match:
-        - arch=test-debian6-x86_64
-        build-job:
-          tags:
-            - donotcare
-      - match:
-        - arch=test-debian6-aarch64
-        build-job:
-          tags:
-            - donotcare
-    - any-job:
-        tags:
-        - donotcare
-    enable-artifacts-buildcache: True
-"""
-        )
-
-    needs_graph = {
-        "(bootstrap) conflict": [],
-        "(bootstrap) gcc": ["(bootstrap) conflict"],
-        "(specs) libelf": ["(bootstrap) gcc", "(bootstrap) conflict"],
-        "(specs) libdwarf": ["(bootstrap) gcc", "(bootstrap) conflict", "(specs) libelf"],
-        "(specs) dyninst": [
-            "(bootstrap) gcc",
-            "(bootstrap) conflict",
-            "(specs) libelf",
-            "(specs) libdwarf",
-        ],
-    }
-
-    with tmpdir.as_cwd():
-        env_cmd("create", "test", "./spack.yaml")
-        outputfile = str(tmpdir.join(".gitlab-ci.yml"))
-
-        with ev.read("test"):
-            ci_cmd("generate", "--output-file", outputfile)
-
-        with open(outputfile) as f:
-            contents = f.read()
-            yaml_contents = syaml.load(contents)
-            _validate_needs_graph(yaml_contents, needs_graph, True)
-
-
 def test_ci_generate_with_env_missing_section(
     tmpdir,
     working_env,
@@ -889,7 +728,7 @@ def activate_rebuild_env(tmpdir, pkg_name, rebuild_env):
             "SPACK_JOB_SPEC_DAG_HASH": rebuild_env.root_spec_dag_hash,
             "SPACK_JOB_SPEC_PKG_NAME": pkg_name,
             "SPACK_COMPILER_ACTION": "NONE",
-            "SPACK_CDASH_BUILD_NAME": "(specs) {0}".format(pkg_name),
+            "SPACK_CDASH_BUILD_NAME": pkg_name,
             "SPACK_REMOTE_MIRROR_URL": rebuild_env.mirror_url,
             "SPACK_PIPELINE_TYPE": "spack_protected_branch",
             "CI_JOB_URL": rebuild_env.ci_job_url,
@@ -1055,7 +894,7 @@ def test_ci_nothing_to_rebuild(
     )

     install_cmd("archive-files")
-    buildcache_cmd("push", "-a", "-f", "-u", "--mirror-url", mirror_url, "archive-files")
+    buildcache_cmd("push", "-a", "-f", "-u", mirror_url, "archive-files")

     filename = str(tmpdir.join("spack.yaml"))
     with open(filename, "w") as f:
@@ -1155,8 +994,8 @@ def test_ci_generate_mirror_override(
     second_ci_yaml = str(tmpdir.join(".gitlab-ci-2.yml"))
     with ev.read("test"):
         install_cmd()
-        buildcache_cmd("push", "-u", "--mirror-url", mirror_url, "patchelf")
-        buildcache_cmd("update-index", "--mirror-url", mirror_url, output=str)
+        buildcache_cmd("push", "-u", mirror_url, "patchelf")
+        buildcache_cmd("update-index", mirror_url, output=str)

         # This generate should not trigger a rebuild of patchelf, since it's in
         # the main mirror referenced in the environment.
@@ -1283,7 +1122,7 @@ def test_push_mirror_contents(
             found_spec_job = False

             for ci_key in yaml_contents.keys():
-                if "(specs) patchelf" in ci_key:
+                if "patchelf" in ci_key:
                     the_elt = yaml_contents[ci_key]
                     assert "variables" in the_elt
                     job_vars = the_elt["variables"]
@@ -1297,7 +1136,7 @@ def test_push_mirror_contents(
         mirror_cmd("rm", "test-ci")

         # Test generating buildcache index while we have bin mirror
-        buildcache_cmd("update-index", "--mirror-url", mirror_url)
+        buildcache_cmd("update-index", mirror_url)
         index_path = os.path.join(buildcache_path, "index.json")
         with open(index_path) as idx_fd:
             index_object = json.load(idx_fd)
@@ -1457,7 +1296,7 @@ def test_ci_generate_override_runner_attrs(
             assert global_vars["SPACK_CHECKOUT_VERSION"] == "12ad69eb1"

             for ci_key in yaml_contents.keys():
-                if "(specs) a" in ci_key:
+                if ci_key.startswith("a"):
                     # Make sure a's attributes override variables, and all the
                     # scripts. Also, make sure the 'toplevel' tag doesn't
                     # appear twice, but that a's specific extra tag does appear
@@ -1477,7 +1316,7 @@ def test_ci_generate_override_runner_attrs(
                     assert the_elt["script"][0] == "custom main step"
                     assert len(the_elt["after_script"]) == 1
                     assert the_elt["after_script"][0] == "custom post step one"
-                if "(specs) dependency-install" in ci_key:
+                if "dependency-install" in ci_key:
                     # Since the dependency-install match omits any
                     # runner-attributes, make sure it inherited all the
                     # top-level attributes.
@@ -1495,7 +1334,7 @@ def test_ci_generate_override_runner_attrs(
                     assert the_elt["script"][0] == "main step"
                     assert len(the_elt["after_script"]) == 1
                     assert the_elt["after_script"][0] == "post step one"
-                if "(specs) flatten-deps" in ci_key:
+                if "flatten-deps" in ci_key:
                     # The flatten-deps match specifies that we keep the two
                     # top level variables, but add a third specific one. It
                     # also adds a custom tag which should be combined with
@@ -1554,9 +1393,10 @@ def test_ci_generate_with_workarounds(
             yaml_contents = syaml.load(contents)

             found_one = False
+            non_rebuild_keys = ["workflow", "stages", "variables", "rebuild-index"]

             for ci_key in yaml_contents.keys():
-                if ci_key.startswith("(specs) "):
+                if ci_key not in non_rebuild_keys:
                     found_one = True
                     job_obj = yaml_contents[ci_key]
                     assert "needs" not in job_obj
@@ -1613,7 +1453,7 @@ def test_ci_rebuild_index(
             ypfd.write(spec_json)

         install_cmd("--add", "--keep-stage", "-f", json_path)
-        buildcache_cmd("push", "-u", "-a", "-f", "--mirror-url", mirror_url, "callpath")
+        buildcache_cmd("push", "-u", "-a", "-f", mirror_url, "callpath")
         ci_cmd("rebuild-index")

         buildcache_path = os.path.join(mirror_dir.strpath, "build_cache")
@@ -1623,140 +1463,6 @@ def test_ci_rebuild_index(
     jsonschema.validate(index_object, db_idx_schema)


-def test_ci_generate_bootstrap_prune_dag(
-    install_mockery_mutable_config,
-    mock_packages,
-    mock_fetch,
-    mock_archive,
-    mutable_config,
-    monkeypatch,
-    tmpdir,
-    mutable_mock_env_path,
-    ci_base_environment,
-):
-    """Test compiler bootstrapping with DAG pruning. Specifically, make
-    sure that if we detect the bootstrapped compiler needs to be rebuilt,
-    we ensure the spec we want to build with that compiler is scheduled
-    for rebuild as well."""
-
-    # Create a temp mirror directory for buildcache usage
-    mirror_dir = tmpdir.join("mirror_dir")
-    mirror_url = "file://{0}".format(mirror_dir.strpath)
-
-    # Install a compiler, because we want to put it in a buildcache
-    install_cmd("gcc@=12.2.0%gcc@10.2.1")
-
-    # Put installed compiler in the buildcache
-    buildcache_cmd("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, "gcc@12.2.0%gcc@10.2.1")
-
-    # Now uninstall the compiler
-    uninstall_cmd("-y", "gcc@12.2.0%gcc@10.2.1")
-
-    monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False)
-    spack.config.set("config:install_missing_compilers", True)
-    assert CompilerSpec("gcc@=12.2.0") not in compilers.all_compiler_specs()
-
-    # Configure the mirror where we put that buildcache w/ the compiler
-    mirror_cmd("add", "test-mirror", mirror_url)
-
-    install_cmd("--no-check-signature", "b%gcc@=12.2.0")
-
-    # Put spec built with installed compiler in the buildcache
-    buildcache_cmd("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, "b%gcc@12.2.0")
-
-    # Now uninstall the spec
-    uninstall_cmd("-y", "b%gcc@12.2.0")
-
-    filename = str(tmpdir.join("spack.yaml"))
-    with open(filename, "w") as f:
-        f.write(
-            """\
-spack:
-  definitions:
-    - bootstrap:
-      - gcc@=12.2.0%gcc@10.2.1
-  specs:
-    - b%gcc@12.2.0
-  mirrors:
-    atestm: {0}
-  ci:
-    bootstrap:
-      - name: bootstrap
-        compiler-agnostic: true
-    pipeline-gen:
-    - submapping:
-      - match:
-        - arch=test-debian6-x86_64
-        build-job:
-          tags:
-            - donotcare
-      - match:
-        - arch=test-debian6-core2
-        build-job:
-          tags:
-            - meh
-      - match:
-        - arch=test-debian6-aarch64
-        build-job:
-          tags:
-            - donotcare
-      - match:
-        - arch=test-debian6-m1
-        build-job:
-          tags:
-            - meh
-""".format(
-                mirror_url
-            )
-        )
-
-    # Without this monkeypatch, pipeline generation process would think that
-    # nothing in the environment needs rebuilding. With the monkeypatch, the
-    # process sees the compiler as needing a rebuild, which should then result
-    # in the specs built with that compiler needing a rebuild too.
-    def fake_get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
-        if spec.name == "gcc":
-            return []
-        else:
-            return [{"spec": spec, "mirror_url": mirror_url}]
-
-    with tmpdir.as_cwd():
-        env_cmd("create", "test", "./spack.yaml")
-        outputfile = str(tmpdir.join(".gitlab-ci.yml"))
-
-        with ev.read("test"):
-            ci_cmd("generate", "--output-file", outputfile)
-
-            with open(outputfile) as of:
-                yaml_contents = of.read()
-                original_yaml_contents = syaml.load(yaml_contents)
-
-            # without the monkeypatch, everything appears up to date and no
-            # rebuild jobs are generated.
-            assert original_yaml_contents
-            assert "no-specs-to-rebuild" in original_yaml_contents
-
-            monkeypatch.setattr(
-                spack.binary_distribution, "get_mirrors_for_spec", fake_get_mirrors_for_spec
-            )
-
-            ci_cmd("generate", "--output-file", outputfile)
-
-            with open(outputfile) as of:
-                yaml_contents = of.read()
-                new_yaml_contents = syaml.load(yaml_contents)
-
-            assert new_yaml_contents
-
-            # This 'needs' graph reflects that even though specs 'a' and 'b' do
-            # not otherwise need to be rebuilt (thanks to DAG pruning), they
-            # both end up in the generated pipeline because the compiler they
-            # depend on is bootstrapped, and *does* need to be rebuilt.
-            needs_graph = {"(bootstrap) gcc": [], "(specs) b": ["(bootstrap) gcc"]}
-
-            _validate_needs_graph(new_yaml_contents, needs_graph, False)
-
-
 def test_ci_get_stack_changed(mock_git_repo, monkeypatch):
     """Test that we can detect the change to .gitlab-ci.yml in a
     mock spack git repo."""
@@ -1828,7 +1534,7 @@ def fake_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
|
||||
generated_hashes = []
|
||||
|
||||
for ci_key in yaml_contents.keys():
|
||||
if ci_key.startswith("(specs)"):
|
||||
if "variables" in yaml_contents[ci_key]:
|
||||
generated_hashes.append(
|
||||
yaml_contents[ci_key]["variables"]["SPACK_JOB_SPEC_DAG_HASH"]
|
||||
)
|
||||
@@ -2240,9 +1946,7 @@ def test_ci_reproduce(
|
||||
ci_cmd("generate", "--output-file", pipeline_path, "--artifacts-root", artifacts_root)
|
||||
|
||||
target_name = spack.platforms.test.Test.default
|
||||
job_name = ci.get_job_name(
|
||||
"specs", False, job_spec, "test-debian6-%s" % target_name, None
|
||||
)
|
||||
job_name = ci.get_job_name(job_spec, "test-debian6-%s" % target_name, None)
|
||||
|
||||
repro_file = os.path.join(working_dir.strpath, "repro.json")
|
||||
repro_details = {
|
||||
@@ -2309,8 +2013,6 @@ def test_cmd_first_line():
|
||||
legacy_spack_yaml_contents = """
|
||||
spack:
|
||||
definitions:
|
||||
- bootstrap:
|
||||
- cmake@3.4.3
|
||||
- old-gcc-pkgs:
|
||||
- archive-files
|
||||
- callpath
|
||||
@@ -2325,9 +2027,6 @@ def test_cmd_first_line():
|
||||
mirrors:
|
||||
test-mirror: file:///some/fake/mirror
|
||||
{0}:
|
||||
bootstrap:
|
||||
- name: bootstrap
|
||||
compiler-agnostic: true
|
||||
match_behavior: first
|
||||
mappings:
|
||||
- match:
|
||||
@@ -2379,16 +2078,10 @@ def test_gitlab_ci_deprecated(
|
||||
contents = f.read()
|
||||
yaml_contents = syaml.load(contents)
|
||||
|
||||
found_spec = False
|
||||
for ci_key in yaml_contents.keys():
|
||||
if "(bootstrap)" in ci_key:
|
||||
found_spec = True
|
||||
assert "cmake" in ci_key
|
||||
assert found_spec
|
||||
assert "stages" in yaml_contents
|
||||
assert len(yaml_contents["stages"]) == 6
|
||||
assert len(yaml_contents["stages"]) == 5
|
||||
assert yaml_contents["stages"][0] == "stage-0"
|
||||
assert yaml_contents["stages"][5] == "stage-rebuild-index"
|
||||
assert yaml_contents["stages"][4] == "stage-rebuild-index"
|
||||
|
||||
assert "rebuild-index" in yaml_contents
|
||||
rebuild_job = yaml_contents["rebuild-index"]
|
||||
|
||||
@@ -3,16 +3,12 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
from spack.main import SpackCommand, SpackCommandError
|
||||
|
||||
graph = SpackCommand("graph")
|
||||
|
||||
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
|
||||
|
||||
|
||||
@pytest.mark.db
|
||||
@pytest.mark.usefixtures("mock_packages", "database")
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -13,8 +12,6 @@
|
||||
|
||||
info = SpackCommand("info")
|
||||
|
||||
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Not yet implemented on Windows")
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def parser():
|
||||
|
||||
@@ -966,7 +966,7 @@ def test_compiler_bootstrap_from_binary_mirror(
|
||||
install("gcc@=10.2.0")
|
||||
|
||||
# Put installed compiler in the buildcache
|
||||
buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, "gcc@10.2.0")
|
||||
buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, "gcc@10.2.0")
|
||||
|
||||
# Now uninstall the compiler
|
||||
uninstall("-y", "gcc@10.2.0")
|
||||
@@ -1138,7 +1138,7 @@ def install_use_buildcache(opt):
|
||||
|
||||
# Populate the buildcache
|
||||
install(package_name)
|
||||
buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, package_name, dependency_name)
|
||||
buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, package_name, dependency_name)
|
||||
|
||||
# Uninstall the all of the packages for clean slate
|
||||
uninstall("-y", "-a")
|
||||
|
||||
@@ -3,8 +3,6 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
import spack.store
|
||||
@@ -15,8 +13,6 @@
|
||||
install = SpackCommand("install")
|
||||
uninstall = SpackCommand("uninstall")
|
||||
|
||||
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
|
||||
|
||||
|
||||
@pytest.mark.db
|
||||
def test_mark_mode_required(mutable_database):
|
||||
|
||||
@@ -235,7 +235,7 @@ def test_mirror_destroy(
|
||||
|
||||
# Put a binary package in a buildcache
|
||||
install("--no-cache", spec_name)
|
||||
buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, spec_name)
|
||||
buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, spec_name)
|
||||
|
||||
contents = os.listdir(mirror_dir.strpath)
|
||||
assert "build_cache" in contents
|
||||
@@ -245,7 +245,7 @@ def test_mirror_destroy(
|
||||
|
||||
assert not os.path.exists(mirror_dir.strpath)
|
||||
|
||||
buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, spec_name)
|
||||
buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, spec_name)
|
||||
|
||||
contents = os.listdir(mirror_dir.strpath)
|
||||
assert "build_cache" in contents
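The buildcache hunks above all make the same mechanical change: `spack buildcache push`/`create` now take the mirror (a path or URL) as a positional argument, replacing the removed `-d/--directory` flag. A minimal sketch of the new call shape, using the same `SpackCommand` helper as the tests above (the package name `zlib` is only a placeholder):

from spack.main import SpackCommand

buildcache = SpackCommand("buildcache")

# old form (flag removed):  buildcache("push", "-u", "-a", "-f", "-d", "/path/to/mirror", "zlib")
# new form: the mirror path or URL is positional
buildcache("push", "-u", "-a", "-f", "/path/to/mirror", "zlib")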
@@ -157,7 +157,7 @@ def _parse_types(string):


def test_spec_deptypes_nodes():
output = spec("--types", "--cover", "nodes", "dt-diamond")
output = spec("--types", "--cover", "nodes", "--no-install-status", "dt-diamond")
types = _parse_types(output)

assert types["dt-diamond"] == [" "]
@@ -167,7 +167,7 @@ def test_spec_deptypes_nodes():


def test_spec_deptypes_edges():
output = spec("--types", "--cover", "edges", "dt-diamond")
output = spec("--types", "--cover", "edges", "--no-install-status", "dt-diamond")
types = _parse_types(output)

assert types["dt-diamond"] == [" "]

@@ -2170,3 +2170,14 @@ def test_concretization_with_compilers_supporting_target_any(self):
with spack.config.override("compilers", compiler_configuration):
s = spack.spec.Spec("a").concretized()
assert s.satisfies("%gcc@12.1.0")

@pytest.mark.parametrize("spec_str", ["mpileaks", "mpileaks ^mpich"])
def test_virtuals_are_annotated_on_edges(self, spec_str, default_mock_concretization):
"""Tests that information on virtuals is annotated on DAG edges"""
spec = default_mock_concretization(spec_str)
mpi_provider = spec["mpi"].name

edges = spec.edges_to_dependencies(name=mpi_provider)
assert len(edges) == 1 and edges[0].virtuals == ("mpi",)
edges = spec.edges_to_dependencies(name="callpath")
assert len(edges) == 1 and edges[0].virtuals == ()

@@ -1920,3 +1920,21 @@ def _func(spec_str, tests=False):
return concretized_specs_cache[key].copy()

return _func


@pytest.fixture
def shell_as(shell):
if sys.platform != "win32":
yield
return
if shell not in ("pwsh", "bat"):
raise RuntimeError("Shell must be one of supported Windows shells (pwsh|bat)")
try:
# fetch and store old shell type
_shell = os.environ.get("SPACK_SHELL", None)
os.environ["SPACK_SHELL"] = shell
yield
finally:
# restore old shell if one was set
if _shell:
os.environ["SPACK_SHELL"] = _shell
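A usage sketch for this fixture, assuming the pytest conventions used elsewhere in this diff (the test name below is hypothetical; the same pattern appears verbatim in the parametrized test_dump_environment further down): the `shell` argument is supplied by `pytest.mark.parametrize`, and `shell_as` swaps `SPACK_SHELL` for the duration of the test on Windows while acting as a no-op elsewhere.

import sys

import pytest

@pytest.mark.parametrize("shell", ["pwsh", "bat"] if sys.platform == "win32" else ["bash"])
def test_uses_shell(shell_as, shell):  # hypothetical test name
    # On Windows, SPACK_SHELL is now set to `shell`; it is restored afterwards.
    ...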

17
lib/spack/spack/test/data/sourceme_modules.sh
Normal file
@@ -0,0 +1,17 @@
#!/usr/bin/env bash
#
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

_module_raw() { return 1; };
module() { return 1; };
ml() { return 1; };
export -f _module_raw;
export -f module;
export -f ml;

export MODULES_AUTO_HANDLING=1
export __MODULES_LMCONFLICT=bar&foo
export NEW_VAR=new

BIN
lib/spack/spack/test/data/specfiles/hdf5.v020.json.gz
Normal file
Binary file not shown.
@@ -400,7 +400,7 @@ def test_sanitize_literals(env, exclude, include):
({"SHLVL": "1"}, ["SH.*"], [], [], ["SHLVL"]),
# Check we can include using a regex
({"SHLVL": "1"}, ["SH.*"], ["SH.*"], ["SHLVL"], []),
# Check regex to exclude Modules v4 related vars
# Check regex to exclude Environment Modules related vars
(
{"MODULES_LMALTNAME": "1", "MODULES_LMCONFLICT": "2"},
["MODULES_(.*)"],
@@ -415,6 +415,13 @@ def test_sanitize_literals(env, exclude, include):
[],
["A_modquar", "b_modquar", "C_modshare"],
),
(
{"__MODULES_LMTAG": "1", "__MODULES_LMPREREQ": "2"},
["__MODULES_(.*)"],
[],
[],
["__MODULES_LMTAG", "__MODULES_LMPREREQ"],
),
],
)
def test_sanitize_regex(env, exclude, include, expected, deleted):
@@ -489,3 +496,19 @@ def test_exclude_lmod_variables():
# Check that variables related to lmod are not in there
modifications = env.group_by_name()
assert not any(x.startswith("LMOD_") for x in modifications)


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
@pytest.mark.regression("13504")
def test_exclude_modules_variables():
# Construct the list of environment modifications
file = os.path.join(datadir, "sourceme_modules.sh")
env = EnvironmentModifications.from_sourcing_file(file)

# Check that variables related to modules are not in there
modifications = env.group_by_name()
assert not any(x.startswith("MODULES_") for x in modifications)
assert not any(x.startswith("__MODULES_") for x in modifications)
assert not any(x.startswith("BASH_FUNC_ml") for x in modifications)
assert not any(x.startswith("BASH_FUNC_module") for x in modifications)
assert not any(x.startswith("BASH_FUNC__module_raw") for x in modifications)

@@ -167,6 +167,46 @@ def test_prepend_path_separator(self, modulefile_content, module_configuration):
assert len([x for x in content if 'append_path("SPACE", "qux", " ")' in x]) == 1
assert len([x for x in content if 'remove_path("SPACE", "qux", " ")' in x]) == 1

@pytest.mark.regression("11355")
def test_manpath_setup(self, modulefile_content, module_configuration):
"""Tests specific setup of MANPATH environment variable."""

module_configuration("autoload_direct")

# no manpath set by module
content = modulefile_content("mpileaks")
assert len([x for x in content if 'append_path("MANPATH", "", ":")' in x]) == 0

# manpath set by module with prepend_path
content = modulefile_content("module-manpath-prepend")
assert (
len([x for x in content if 'prepend_path("MANPATH", "/path/to/man", ":")' in x]) == 1
)
assert (
len([x for x in content if 'prepend_path("MANPATH", "/path/to/share/man", ":")' in x])
== 1
)
assert len([x for x in content if 'append_path("MANPATH", "", ":")' in x]) == 1

# manpath set by module with append_path
content = modulefile_content("module-manpath-append")
assert len([x for x in content if 'append_path("MANPATH", "/path/to/man", ":")' in x]) == 1
assert len([x for x in content if 'append_path("MANPATH", "", ":")' in x]) == 1

# manpath set by module with setenv
content = modulefile_content("module-manpath-setenv")
assert len([x for x in content if 'setenv("MANPATH", "/path/to/man")' in x]) == 1
assert len([x for x in content if 'append_path("MANPATH", "", ":")' in x]) == 0

@pytest.mark.regression("29578")
def test_setenv_raw_value(self, modulefile_content, module_configuration):
"""Tests that we can set environment variable value without formatting it."""

module_configuration("autoload_direct")
content = modulefile_content("module-setenv-raw")

assert len([x for x in content if 'setenv("FOO", "{{name}}, {name}, {{}}, {}")' in x]) == 1

def test_help_message(self, modulefile_content, module_configuration):
"""Tests the generation of module help message."""


@@ -37,6 +37,11 @@ def test_autoload_direct(self, modulefile_content, module_configuration):
module_configuration("autoload_direct")
content = modulefile_content(mpileaks_spec_string)

assert (
len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
== 1
)
assert len([x for x in content if "depends-on " in x]) == 2
assert len([x for x in content if "module load " in x]) == 2

# dtbuild1 has
@@ -46,6 +51,11 @@ def test_autoload_direct(self, modulefile_content, module_configuration):
# Just make sure the 'build' dependency is not there
content = modulefile_content("dtbuild1")

assert (
len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
== 1
)
assert len([x for x in content if "depends-on " in x]) == 2
assert len([x for x in content if "module load " in x]) == 2

# The configuration file sets the verbose keyword to False
@@ -58,6 +68,11 @@ def test_autoload_all(self, modulefile_content, module_configuration):
module_configuration("autoload_all")
content = modulefile_content(mpileaks_spec_string)

assert (
len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
== 1
)
assert len([x for x in content if "depends-on " in x]) == 5
assert len([x for x in content if "module load " in x]) == 5

# dtbuild1 has
@@ -67,6 +82,11 @@ def test_autoload_all(self, modulefile_content, module_configuration):
# Just make sure the 'build' dependency is not there
content = modulefile_content("dtbuild1")

assert (
len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
== 1
)
assert len([x for x in content if "depends-on " in x]) == 2
assert len([x for x in content if "module load " in x]) == 2

def test_prerequisites_direct(self, modulefile_content, module_configuration):
@@ -103,6 +123,7 @@ def test_alter_environment(self, modulefile_content, module_configuration):
assert len([x for x in content if x.startswith("prepend-path CMAKE_PREFIX_PATH")]) == 0
assert len([x for x in content if 'setenv FOO "foo"' in x]) == 0
assert len([x for x in content if "unsetenv BAR" in x]) == 0
assert len([x for x in content if "depends-on foo/bar" in x]) == 1
assert len([x for x in content if "module load foo/bar" in x]) == 1
assert len([x for x in content if "setenv LIBDWARF_ROOT" in x]) == 1

@@ -121,6 +142,55 @@ def test_prepend_path_separator(self, modulefile_content, module_configuration):
assert len([x for x in content if 'append-path --delim " " SPACE "qux"' in x]) == 1
assert len([x for x in content if 'remove-path --delim " " SPACE "qux"' in x]) == 1

@pytest.mark.regression("11355")
def test_manpath_setup(self, modulefile_content, module_configuration):
"""Tests specific setup of MANPATH environment variable."""

module_configuration("autoload_direct")

# no manpath set by module
content = modulefile_content("mpileaks")
assert len([x for x in content if 'append-path --delim ":" MANPATH ""' in x]) == 0

# manpath set by module with prepend-path
content = modulefile_content("module-manpath-prepend")
assert (
len([x for x in content if 'prepend-path --delim ":" MANPATH "/path/to/man"' in x])
== 1
)
assert (
len(
[
x
for x in content
if 'prepend-path --delim ":" MANPATH "/path/to/share/man"' in x
]
)
== 1
)
assert len([x for x in content if 'append-path --delim ":" MANPATH ""' in x]) == 1

# manpath set by module with append-path
content = modulefile_content("module-manpath-append")
assert (
len([x for x in content if 'append-path --delim ":" MANPATH "/path/to/man"' in x]) == 1
)
assert len([x for x in content if 'append-path --delim ":" MANPATH ""' in x]) == 1

# manpath set by module with setenv
content = modulefile_content("module-manpath-setenv")
assert len([x for x in content if 'setenv MANPATH "/path/to/man"' in x]) == 1
assert len([x for x in content if 'append-path --delim ":" MANPATH ""' in x]) == 0

@pytest.mark.regression("29578")
def test_setenv_raw_value(self, modulefile_content, module_configuration):
"""Tests that we can set environment variable value without formatting it."""

module_configuration("autoload_direct")
content = modulefile_content("module-setenv-raw")

assert len([x for x in content if 'setenv FOO "{{name}}, {name}, {{}}, {}"' in x]) == 1

def test_help_message(self, modulefile_content, module_configuration):
"""Tests the generation of module help message."""

@@ -394,10 +464,16 @@ def test_autoload_with_constraints(self, modulefile_content, module_configuratio

# Test the mpileaks that should have the autoloaded dependencies
content = modulefile_content("mpileaks ^mpich2")
assert len([x for x in content if "depends-on " in x]) == 2
assert len([x for x in content if "module load " in x]) == 2

# Test the mpileaks that should NOT have the autoloaded dependencies
content = modulefile_content("mpileaks ^mpich")
assert (
len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
== 0
)
assert len([x for x in content if "depends-on " in x]) == 0
assert len([x for x in content if "module load " in x]) == 0

def test_modules_no_arch(self, factory, module_configuration):

@@ -100,7 +100,7 @@ def test_buildcache(mock_archive, tmpdir):
parser = argparse.ArgumentParser()
buildcache.setup_parser(parser)

create_args = ["create", "-a", "-f", "-d", mirror_path, pkghash]
create_args = ["create", "-a", "-f", mirror_path, pkghash]
# Create a private key to sign package with if gpg2 available
spack.util.gpg.create(
name="test key 1", expires="0", email="spack@googlegroups.com", comment="Spack test key"
@@ -116,7 +116,7 @@ def test_buildcache(mock_archive, tmpdir):
# Uninstall the package
pkg.do_uninstall(force=True)

install_args = ["install", "-a", "-f", pkghash]
install_args = ["install", "-f", pkghash]
args = parser.parse_args(install_args)
# Test install
buildcache.buildcache(parser, args)
@@ -131,30 +131,6 @@ def test_buildcache(mock_archive, tmpdir):
assert buildinfo["relocate_textfiles"] == ["dummy.txt"]
assert buildinfo["relocate_links"] == ["link_to_dummy.txt"]

# create build cache with relative path
create_args.insert(create_args.index("-a"), "-f")
create_args.insert(create_args.index("-a"), "-r")
args = parser.parse_args(create_args)
buildcache.buildcache(parser, args)

# Uninstall the package
pkg.do_uninstall(force=True)

args = parser.parse_args(install_args)
buildcache.buildcache(parser, args)

# test overwrite install
install_args.insert(install_args.index("-a"), "-f")
args = parser.parse_args(install_args)
buildcache.buildcache(parser, args)

files = os.listdir(spec.prefix)
assert "link_to_dummy.txt" in files
assert "dummy.txt" in files
# assert os.path.realpath(
# os.path.join(spec.prefix, 'link_to_dummy.txt')
# ) == os.path.realpath(os.path.join(spec.prefix, 'dummy.txt'))

args = parser.parse_args(["keys"])
buildcache.buildcache(parser, args)


@@ -125,7 +125,7 @@ def _mock_installed(self):

# use the installed C. It should *not* force A to use the installed D
# *if* we're doing a fresh installation.
a_spec = Spec(a)
a_spec._add_dependency(c_spec, deptypes=("build", "link"))
a_spec._add_dependency(c_spec, deptypes=("build", "link"), virtuals=())
a_spec.concretize()
assert spack.version.Version("2") == a_spec[c][d].version
assert spack.version.Version("2") == a_spec[e].version
@@ -148,7 +148,7 @@ def test_specify_preinstalled_dep(tmpdir, monkeypatch):
monkeypatch.setattr(Spec, "installed", property(lambda x: x.name != "a"))

a_spec = Spec("a")
a_spec._add_dependency(b_spec, deptypes=("build", "link"))
a_spec._add_dependency(b_spec, deptypes=("build", "link"), virtuals=())
a_spec.concretize()

assert set(x.name for x in a_spec.traverse()) == set(["a", "b", "c"])
@@ -989,9 +989,9 @@ def test_synthetic_construction_of_split_dependencies_from_same_package(mock_pac
link_run_spec = Spec("c@=1.0").concretized()
build_spec = Spec("c@=2.0").concretized()

root.add_dependency_edge(link_run_spec, deptypes="link")
root.add_dependency_edge(link_run_spec, deptypes="run")
root.add_dependency_edge(build_spec, deptypes="build")
root.add_dependency_edge(link_run_spec, deptypes="link", virtuals=())
root.add_dependency_edge(link_run_spec, deptypes="run", virtuals=())
root.add_dependency_edge(build_spec, deptypes="build", virtuals=())

# Check dependencies from the perspective of root
assert len(root.dependencies()) == 2
@@ -1017,7 +1017,7 @@ def test_synthetic_construction_bootstrapping(mock_packages, config):
root = Spec("b@=2.0").concretized()
bootstrap = Spec("b@=1.0").concretized()

root.add_dependency_edge(bootstrap, deptypes="build")
root.add_dependency_edge(bootstrap, deptypes="build", virtuals=())

assert len(root.dependencies()) == 1
assert root.dependencies()[0].name == "b"
@@ -1036,7 +1036,7 @@ def test_addition_of_different_deptypes_in_multiple_calls(mock_packages, config)
bootstrap = Spec("b@=1.0").concretized()

for current_deptype in ("build", "link", "run"):
root.add_dependency_edge(bootstrap, deptypes=current_deptype)
root.add_dependency_edge(bootstrap, deptypes=current_deptype, virtuals=())

# Check edges in dependencies
assert len(root.edges_to_dependencies()) == 1
@@ -1063,9 +1063,9 @@ def test_adding_same_deptype_with_the_same_name_raises(
c1 = Spec("b@=1.0").concretized()
c2 = Spec("b@=2.0").concretized()

p.add_dependency_edge(c1, deptypes=c1_deptypes)
p.add_dependency_edge(c1, deptypes=c1_deptypes, virtuals=())
with pytest.raises(spack.error.SpackError):
p.add_dependency_edge(c2, deptypes=c2_deptypes)
p.add_dependency_edge(c2, deptypes=c2_deptypes, virtuals=())


@pytest.mark.regression("33499")
@@ -1084,16 +1084,16 @@ def test_indexing_prefers_direct_or_transitive_link_deps():
z3_flavor_1 = Spec("z3 +through_a1")
z3_flavor_2 = Spec("z3 +through_z1")

root.add_dependency_edge(a1, deptypes=("build", "run", "test"))
root.add_dependency_edge(a1, deptypes=("build", "run", "test"), virtuals=())

# unique package as a dep of a build/run/test type dep.
a1.add_dependency_edge(a2, deptypes="all")
a1.add_dependency_edge(z3_flavor_1, deptypes="all")
a1.add_dependency_edge(a2, deptypes="all", virtuals=())
a1.add_dependency_edge(z3_flavor_1, deptypes="all", virtuals=())

# chain of link type deps root -> z1 -> z2 -> z3
root.add_dependency_edge(z1, deptypes="link")
z1.add_dependency_edge(z2, deptypes="link")
z2.add_dependency_edge(z3_flavor_2, deptypes="link")
root.add_dependency_edge(z1, deptypes="link", virtuals=())
z1.add_dependency_edge(z2, deptypes="link", virtuals=())
z2.add_dependency_edge(z3_flavor_2, deptypes="link", virtuals=())

# Indexing should prefer the link-type dep.
assert "through_z1" in root["z3"].variants
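All of these hunks thread the new `virtuals` keyword through `add_dependency_edge` and `_add_dependency`: plain callers pass an empty tuple, while edges that provide a virtual carry its name (see test_virtuals_are_annotated_on_edges earlier in this diff). A minimal sketch against the mock packages used by these tests:

from spack.spec import Spec

parent = Spec("mpileaks")
child = Spec("zmpi")

# a non-virtual edge records an empty tuple
parent.add_dependency_edge(child, deptypes="link", virtuals=())

# the annotation can be read back from the edge objects
edge = parent.edges_to_dependencies(name="zmpi")[0]
assert edge.virtuals == ()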

@@ -971,7 +971,7 @@ def test_error_message_unknown_variant(self):
def test_satisfies_dependencies_ordered(self):
d = Spec("zmpi ^fake")
s = Spec("mpileaks")
s._add_dependency(d, deptypes=())
s._add_dependency(d, deptypes=(), virtuals=())
assert s.satisfies("mpileaks ^zmpi ^fake")

@pytest.mark.parametrize("transitive", [True, False])
@@ -1018,6 +1018,7 @@ def test_is_extension_after_round_trip_to_dict(config, mock_packages, spec_str):


def test_malformed_spec_dict():
# FIXME: This test was really testing the specific implementation with an ad-hoc test
with pytest.raises(SpecError, match="malformed"):
Spec.from_dict(
{"spec": {"_meta": {"version": 2}, "nodes": [{"dependencies": {"name": "foo"}}]}}
@@ -1025,6 +1026,7 @@ def test_malformed_spec_dict():


def test_spec_dict_hashless_dep():
# FIXME: This test was really testing the specific implementation with an ad-hoc test
with pytest.raises(SpecError, match="Couldn't parse"):
Spec.from_dict(
{
@@ -1118,7 +1120,7 @@ def test_concretize_partial_old_dag_hash_spec(mock_packages, config):

# add it to an abstract spec as a dependency
top = Spec("dt-diamond")
top.add_dependency_edge(bottom, deptypes=())
top.add_dependency_edge(bottom, deptypes=(), virtuals=())

# concretize with the already-concrete dependency
top.concretize()

@@ -43,12 +43,6 @@ def check_json_round_trip(spec):
assert spec.eq_dag(spec_from_json)


def test_simple_spec():
spec = Spec("mpileaks")
check_yaml_round_trip(spec)
check_json_round_trip(spec)


def test_read_spec_from_signed_json():
spec_dir = os.path.join(spack.paths.test_path, "data", "mirrors", "signed_json")
file_name = (
@@ -70,13 +64,6 @@ def check_spec(spec_to_check):
check_spec(s)


def test_normal_spec(mock_packages):
spec = Spec("mpileaks+debug~opt")
spec.normalize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)


@pytest.mark.parametrize(
"invalid_yaml", ["playing_playlist: {{ action }} playlist {{ playlist_name }}"]
)
@@ -95,37 +82,28 @@ def test_invalid_json_spec(invalid_json, error_message):
assert error_message in exc_msg


def test_external_spec(config, mock_packages):
spec = Spec("externaltool")
spec.concretize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)

spec = Spec("externaltest")
spec.concretize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)


def test_ambiguous_version_spec(mock_packages):
spec = Spec("mpileaks@1.0:5.0,6.1,7.3+debug~opt")
spec.normalize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)


def test_concrete_spec(config, mock_packages):
spec = Spec("mpileaks+debug~opt")
spec.concretize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)


def test_yaml_multivalue(config, mock_packages):
spec = Spec('multivalue-variant foo="bar,baz"')
spec.concretize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)
@pytest.mark.parametrize(
"abstract_spec",
[
# Externals
"externaltool",
"externaltest",
# Ambiguous version spec
"mpileaks@1.0:5.0,6.1,7.3+debug~opt",
# Variants
"mpileaks+debug~opt",
'multivalue-variant foo="bar,baz"',
# Virtuals on edges
"callpath",
"mpileaks",
],
)
def test_roundtrip_concrete_specs(abstract_spec, default_mock_concretization):
check_yaml_round_trip(Spec(abstract_spec))
check_json_round_trip(Spec(abstract_spec))
concrete_spec = default_mock_concretization(abstract_spec)
check_yaml_round_trip(concrete_spec)
check_json_round_trip(concrete_spec)


def test_yaml_subdag(config, mock_packages):
@@ -506,6 +484,8 @@ def test_legacy_yaml(tmpdir, install_mockery, mock_packages):
("specfiles/hdf5.v017.json.gz", "xqh5iyjjtrp2jw632cchacn3l7vqzf3m", spack.spec.SpecfileV2),
# Use "full hash" everywhere, see https://github.com/spack/spack/pull/28504
("specfiles/hdf5.v019.json.gz", "iulacrbz7o5v5sbj7njbkyank3juh6d3", spack.spec.SpecfileV3),
# Add properties on edges, see https://github.com/spack/spack/pull/34821
("specfiles/hdf5.v020.json.gz", "vlirlcgazhvsvtundz4kug75xkkqqgou", spack.spec.SpecfileV4),
],
)
def test_load_json_specfiles(specfile, expected_hash, reader_cls):

@@ -19,7 +19,7 @@ def create_dag(nodes, edges):
"""
specs = {name: Spec(name) for name in nodes}
for parent, child, deptypes in edges:
specs[parent].add_dependency_edge(specs[child], deptypes=deptypes)
specs[parent].add_dependency_edge(specs[child], deptypes=deptypes, virtuals=())
return specs


@@ -113,13 +113,16 @@ def test_path_put_first(prepare_environment_for_tests):
assert envutil.get_path("TEST_ENV_VAR") == expected


def test_dump_environment(prepare_environment_for_tests, tmpdir):
@pytest.mark.parametrize("shell", ["pwsh", "bat"] if sys.platform == "win32" else ["bash"])
def test_dump_environment(prepare_environment_for_tests, shell_as, shell, tmpdir):
test_paths = "/a:/b/x:/b/c"
os.environ["TEST_ENV_VAR"] = test_paths
dumpfile_path = str(tmpdir.join("envdump.txt"))
envutil.dump_environment(dumpfile_path)
with open(dumpfile_path, "r") as dumpfile:
if sys.platform == "win32":
if shell == "pwsh":
assert "$Env:TEST_ENV_VAR={}\n".format(test_paths) in list(dumpfile)
elif shell == "bat":
assert 'set "TEST_ENV_VAR={}"\n'.format(test_paths) in list(dumpfile)
else:
assert "TEST_ENV_VAR={0}; export TEST_ENV_VAR\n".format(test_paths) in list(dumpfile)
@@ -164,11 +167,14 @@ def test_escape_double_quotes_in_shell_modifications():

to_validate.set("QUOTED_VAR", '"MY_VAL"')

cmds = to_validate.shell_modifications()

if sys.platform != "win32":
if sys.platform == "win32":
cmds = to_validate.shell_modifications(shell="bat")
assert r'set "VAR=$PATH;$ANOTHER_PATH"' in cmds
assert r'set "QUOTED_VAR="MY_VAL"' in cmds
cmds = to_validate.shell_modifications(shell="pwsh")
assert r"$Env:VAR=$PATH;$ANOTHER_PATH" in cmds
assert r'$Env:QUOTED_VAR="MY_VAL"' in cmds
else:
cmds = to_validate.shell_modifications()
assert 'export VAR="$PATH:$ANOTHER_PATH"' in cmds
assert r'export QUOTED_VAR="\"MY_VAL\""' in cmds
else:
assert "export VAR=$PATH;$ANOTHER_PATH" in cmds
assert r'export QUOTED_VAR="MY_VAL"' in cmds

@@ -337,15 +337,15 @@ def test_remove_complex_package_logic_filtered():
("grads", "rrlmwml3f2frdnqavmro3ias66h5b2ce"),
("llvm", "nufffum5dabmaf4l5tpfcblnbfjknvd3"),
# has @when("@4.1.0") and raw unicode literals
("mfem", "qtneutm6khd6epd2rhyuv2y6zavsxbed"),
("mfem@4.0.0", "qtneutm6khd6epd2rhyuv2y6zavsxbed"),
("mfem@4.1.0", "uit2ydzhra3b2mlvnq262qlrqqmuwq3d"),
("mfem", "lbhr43gm5zdye2yhqznucxb4sg6vhryl"),
("mfem@4.0.0", "lbhr43gm5zdye2yhqznucxb4sg6vhryl"),
("mfem@4.1.0", "vjdjdgjt6nyo7ited2seki5epggw5gza"),
# has @when("@1.5.0:")
("py-torch", "qs7djgqn7dy7r3ps4g7hv2pjvjk4qkhd"),
("py-torch@1.0", "qs7djgqn7dy7r3ps4g7hv2pjvjk4qkhd"),
("py-torch@1.6", "p4ine4hc6f2ik2f2wyuwieslqbozll5w"),
# has a print with multiple arguments
("legion", "sffy6vz3dusxnxeetofoomlaieukygoj"),
("legion", "efpfd2c4pzhsbyc3o7plqcmtwm6b57yh"),
# has nested `with when()` blocks and loops
("trilinos", "vqrgscjrla4hi7bllink7v6v6dwxgc2p"),
],

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import os
import sys

import pytest

@@ -34,7 +33,6 @@ def _create_url(relative_url):
root_with_fragment = _create_url("index_with_fragment.html")


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
@pytest.mark.parametrize(
"depth,expected_found,expected_not_found,expected_text",
[
@@ -99,20 +97,17 @@ def test_spider_no_response(monkeypatch):
assert not pages and not links


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_0():
versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=0)
assert Version("0.0.0") in versions


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_1():
versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=1)
assert Version("0.0.0") in versions
assert Version("1.0.0") in versions


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_2():
versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=2)
assert Version("0.0.0") in versions
@@ -120,14 +115,12 @@ def test_find_versions_of_archive_2():
assert Version("2.0.0") in versions


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_exotic_versions_of_archive_2():
versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=2)
# up for grabs to make this better.
assert Version("2.0.0b2") in versions


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_3():
versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=3)
assert Version("0.0.0") in versions
@@ -137,7 +130,6 @@ def test_find_versions_of_archive_3():
assert Version("4.5") in versions


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_exotic_versions_of_archive_3():
versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=3)
assert Version("2.0.0b2") in versions
@@ -145,7 +137,6 @@ def test_find_exotic_versions_of_archive_3():
assert Version("4.5-rc5") in versions


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_with_fragment():
versions = spack.util.web.find_versions_of_archive(
root_tarball, root_with_fragment, list_depth=0
@@ -206,7 +197,6 @@ def test_etag_parser():
assert spack.util.web.parse_etag("abc def") is None


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_list_url(tmpdir):
testpath = str(tmpdir)
testpath_url = url_util.path_to_file_url(testpath)

@@ -211,7 +211,9 @@ def get_visitor_from_args(cover, direction, deptype, key=id, visited=None, visit
def with_artificial_edges(specs):
"""Initialize a list of edges from an imaginary root node to the root specs."""
return [
EdgeAndDepth(edge=spack.spec.DependencySpec(parent=None, spec=s, deptypes=()), depth=0)
EdgeAndDepth(
edge=spack.spec.DependencySpec(parent=None, spec=s, deptypes=(), virtuals=()), depth=0
)
for s in specs
]


@@ -47,6 +47,7 @@
"csh": "setenv {0} {1};\n",
"fish": "set -gx {0} {1};\n",
"bat": 'set "{0}={1}"\n',
"pwsh": "$Env:{0}={1}\n",
}


@@ -55,6 +56,7 @@
"csh": "unsetenv {0};\n",
"fish": "set -e {0};\n",
"bat": 'set "{0}="\n',
"pwsh": "Remove-Item Env:{0}\n",
}


@@ -172,7 +174,9 @@ def path_put_first(var_name: str, directories: List[Path]):


def _win_env_var_to_set_line(var: str, val: str) -> str:
return f'set "{var}={val}"'
is_pwsh = os.environ.get("SPACK_SHELL", None) == "pwsh"
env_set_phrase = f"$Env:{var}={val}" if is_pwsh else f'set "{var}={val}"'
return env_set_phrase


def _nix_env_var_to_source_line(var: str, val: str) -> str:
@@ -351,13 +355,20 @@ def execute(self, env: MutableMapping[str, str]):


class SetEnv(NameValueModifier):
__slots__ = ("force",)
__slots__ = ("force", "raw")

def __init__(
self, name: str, value: str, *, trace: Optional[Trace] = None, force: bool = False
self,
name: str,
value: str,
*,
trace: Optional[Trace] = None,
force: bool = False,
raw: bool = False,
):
super().__init__(name, value, trace=trace)
self.force = force
self.raw = raw

def execute(self, env: MutableMapping[str, str]):
tty.debug(f"SetEnv: {self.name}={str(self.value)}", level=3)
@@ -501,15 +512,16 @@ def _trace(self) -> Optional[Trace]:
return Trace(filename=filename, lineno=lineno, context=current_context)

@system_env_normalize
def set(self, name: str, value: str, *, force: bool = False):
def set(self, name: str, value: str, *, force: bool = False, raw: bool = False):
"""Stores a request to set an environment variable.

Args:
name: name of the environment variable
value: value of the environment variable
force: if True, audit will not consider this modification a warning
raw: if True, format of value string is skipped
"""
item = SetEnv(name, value, trace=self._trace(), force=force)
item = SetEnv(name, value, trace=self._trace(), force=force, raw=raw)
self.env_modifications.append(item)
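A short sketch of the new `raw` flag, assuming the import path `spack.util.environment` for the `EnvironmentModifications` class used in the tests above: a raw value is stored verbatim, so downstream consumers skip `str.format`-style expansion of `{...}` placeholders, which is what the module-setenv-raw tests earlier in this diff exercise.

from spack.util.environment import EnvironmentModifications

env = EnvironmentModifications()
env.set("FOO", "{{name}}, {name}", raw=True)  # stored verbatim, no placeholder formatting
env.set("BAR", "plain-value")  # default behavior is unchanged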

@system_env_normalize
@@ -685,7 +697,7 @@ def apply_modifications(self, env: Optional[MutableMapping[str, str]] = None):

def shell_modifications(
self,
shell: str = "sh",
shell: str = "sh" if sys.platform != "win32" else os.environ.get("SPACK_SHELL", "bat"),
explicit: bool = False,
env: Optional[MutableMapping[str, str]] = None,
) -> str:
@@ -768,16 +780,21 @@ def from_sourcing_file(
"PS1",
"PS2",
"ENV",
# Environment modules v4
# Environment Modules or Lmod
"LOADEDMODULES",
"_LMFILES_",
"BASH_FUNC_module()",
"MODULEPATH",
"MODULES_(.*)",
r"(\w*)_mod(quar|share)",
# Lmod configuration
r"LMOD_(.*)",
"MODULERCFILE",
"BASH_FUNC_ml()",
"BASH_FUNC_module()",
# Environment Modules-specific configuration
"MODULESHOME",
"BASH_FUNC__module_raw()",
r"MODULES_(.*)",
r"__MODULES_(.*)",
r"(\w*)_mod(quar|share)",
# Lmod-specific configuration
r"LMOD_(.*)",
]
)


@@ -87,28 +87,11 @@ def __init__(self, py_ver_consistent=False, _avoid_backslashes=False):

Arguments:
py_ver_consistent (bool): if True, generate unparsed code that is
consistent between Python 2.7 and 3.5-3.10.

Consistency is achieved by:
1. Ensuring that *args and **kwargs are always the last arguments,
regardless of the python version, because Python 2's AST does not
have sufficient information to reconstruct star-arg order.
2. Always unparsing print as a function.
3. Unparsing Python3 unicode literals the way Python 2 would.

Without these changes, the same source can generate different code for Python 2
and Python 3, depending on subtle AST differences. The first of these two
causes this module to behave differently from Python 3.8+'s `ast.unparse()`

One place where single source will generate an inconsistent AST is with
multi-argument print statements, e.g.::

print("foo", "bar", "baz")

In Python 2, this prints a tuple; in Python 3, it is the print function with
multiple arguments. Use ``from __future__ import print_function`` to avoid
this inconsistency.
consistent between Python versions 3.5-3.11.

For legacy reasons, consistency is achieved by unparsing Python3 unicode literals
the way Python 2 would. This preserved Spack package hash consistency during the
python2/3 transition.
"""
self.future_imports = []
self._indent = 0
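A hedged sketch of how this unparser is typically driven; the `unparse` entry point and its import path are assumptions, not shown in this diff:

import ast

from spack.util.unparse import unparse  # assumed entry point for this unparser

tree = ast.parse('print("foo", "bar")')
# With py_ver_consistent=True, unicode literals are rendered the way Python 2
# would render them, keeping historical package hashes stable.
source = unparse(tree, py_ver_consistent=True)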
@@ -299,61 +282,6 @@ def visit_Exec(self, node):
self.write(", ")
self.dispatch(node.locals)

def visit_Print(self, node):
# Use print function so that python 2 unparsing is consistent with 3
if self._py_ver_consistent:
self.fill("print")
with self.delimit("(", ")"):
values = node.values

# Can't tell print(foo, bar, baz) and print((foo, bar, baz)) apart in
# python 2 and 3, so treat them the same to make hashes consistent.
# Single-tuple print are rare and unlikely to affect package hashes,
# esp. as they likely print to stdout.
if len(values) == 1 and isinstance(values[0], ast.Tuple):
values = node.values[0].elts

do_comma = False
for e in values:
if do_comma:
self.write(", ")
else:
do_comma = True
self.dispatch(e)

if not node.nl:
if do_comma:
self.write(", ")
else:
do_comma = True
self.write("end=''")

if node.dest:
if do_comma:
self.write(", ")
else:
do_comma = True
self.write("file=")
self.dispatch(node.dest)

else:
# unparse Python 2 print statements
self.fill("print ")

do_comma = False
if node.dest:
self.write(">>")
self.dispatch(node.dest)
do_comma = True
for e in node.values:
if do_comma:
self.write(", ")
else:
do_comma = True
self.dispatch(e)
if not node.nl:
self.write(",")

def visit_Global(self, node):
self.fill("global ")
interleave(lambda: self.write(", "), self.write, node.names)
@@ -962,65 +890,28 @@ def visit_Call(self, node):
self.set_precedence(_Precedence.ATOM, node.func)

args = node.args
if self._py_ver_consistent:
# make print(a, b, c) and print((a, b, c)) equivalent, since you can't
# tell them apart between Python 2 and 3. See _Print() for more details.
if getattr(node.func, "id", None) == "print":
if len(node.args) == 1 and isinstance(node.args[0], ast.Tuple):
args = node.args[0].elts

self.dispatch(node.func)

with self.delimit("(", ")"):
comma = False

# starred arguments last in Python 3.5+, for consistency w/earlier versions
star_and_kwargs = []
move_stars_last = sys.version_info[:2] >= (3, 5)
# NOTE: this code is no longer compatible with python versions 2.7:3.4
# If you run on python@:3.4, you will see instability in package hashes
# across python versions

for e in args:
if move_stars_last and isinstance(e, ast.Starred):
star_and_kwargs.append(e)
if comma:
self.write(", ")
else:
if comma:
self.write(", ")
else:
comma = True
self.dispatch(e)
comma = True
self.dispatch(e)

for e in node.keywords:
# starting from Python 3.5 this denotes a kwargs part of the invocation
if e.arg is None and move_stars_last:
star_and_kwargs.append(e)
if comma:
self.write(", ")
else:
if comma:
self.write(", ")
else:
comma = True
self.dispatch(e)

if move_stars_last:
for e in star_and_kwargs:
if comma:
self.write(", ")
else:
comma = True
self.dispatch(e)

if sys.version_info[:2] < (3, 5):
if node.starargs:
if comma:
self.write(", ")
else:
comma = True
self.write("*")
self.dispatch(node.starargs)
if node.kwargs:
if comma:
self.write(", ")
else:
comma = True
self.write("**")
self.dispatch(node.kwargs)
comma = True
self.dispatch(e)

def visit_Subscript(self, node):
self.set_precedence(_Precedence.ATOM, node.value)

@@ -17,6 +17,7 @@
import traceback
import urllib.parse
from html.parser import HTMLParser
from pathlib import Path, PurePosixPath
from urllib.error import URLError
from urllib.request import HTTPSHandler, Request, build_opener

@@ -498,7 +499,8 @@ def list_url(url, recursive=False):

if local_path:
if recursive:
return list(_iter_local_prefix(local_path))
# convert backslash to forward slash as required for URLs
return [str(PurePosixPath(Path(p))) for p in list(_iter_local_prefix(local_path))]
return [
subpath
for subpath in os.listdir(local_path)
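The backslash-to-slash conversion above, and the PurePosixPath use in the next hunk, lean on pathlib: wrapping a native Path in PurePosixPath re-renders its components with "/" separators. A small self-contained illustration (the example path is made up):

from pathlib import Path, PurePosixPath

# On Windows, Path splits on "\\", so this prints "build_cache/index.json";
# on POSIX a backslash is an ordinary character and the string passes through.
print(str(PurePosixPath(Path(r"build_cache\index.json"))))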
|
||||
@@ -738,7 +740,8 @@ def find_versions_of_archive(
|
||||
|
||||
# We'll be a bit more liberal and just look for the archive
|
||||
# part, not the full path.
|
||||
url_regex = os.path.basename(url_regex)
|
||||
# this is a URL so it is a posixpath even on Windows
|
||||
url_regex = PurePosixPath(url_regex).name
|
||||
|
||||
# We need to add a / to the beginning of the regex to prevent
|
||||
# Spack from picking up similarly named packages like:
|
||||
|
||||
@@ -744,7 +744,7 @@ deprecated-ci-build:
|
||||
- sed -i.bkp s/"spack install gcc"/"spack install --cache-only --reuse gcc"/ postinstall.sh
|
||||
- diff postinstall.sh postinstall.sh.bkp || echo Done
|
||||
- /bin/bash postinstall.sh -fg
|
||||
- spack config --scope site add "packages:all:target:\"${SPACK_TARGET_ARCH}\""
|
||||
- spack config --scope site add "packages:all:target:[${SPACK_TARGET_ARCH}]"
|
||||
after_script:
|
||||
- - mv "${CI_PROJECT_DIR}/postinstall.sh" "${CI_PROJECT_DIR}/jobs_scratch_dir/"
|
||||
|
||||
|
||||
@@ -48,7 +48,7 @@ spack:
|
||||
- spack gpg trust /bootstrap/public-key
|
||||
- diff /bootstrap/postinstall.sh /bootstrap/postinstall.sh.bkp || echo Done
|
||||
- - /bin/bash "${SPACK_ARTIFACTS_ROOT}/postinstall.sh" -fg
|
||||
- spack config --scope site add "packages:all:target:\"target=${SPACK_TARGET_ARCH}\""
|
||||
- spack config --scope site add "packages:all:target:[${SPACK_TARGET_ARCH}]"
|
||||
- signing-job:
|
||||
before_script:
|
||||
# Do not distribute Intel & ARM binaries
|
||||
|
||||
@@ -50,7 +50,7 @@ spack:
|
||||
- spack gpg trust /bootstrap/public-key
|
||||
- diff /bootstrap/postinstall.sh /bootstrap/postinstall.sh.bkp || echo Done
|
||||
- - /bin/bash "${SPACK_ARTIFACTS_ROOT}/postinstall.sh" -fg
|
||||
- spack config --scope site add "packages:all:target:\"target=${SPACK_TARGET_ARCH}\""
|
||||
- spack config --scope site add "packages:all:target:[${SPACK_TARGET_ARCH}]"
|
||||
- signing-job:
|
||||
before_script:
|
||||
# Do not distribute Intel & ARM binaries
|
||||
|
||||
@@ -50,7 +50,7 @@ spack:
|
||||
- spack gpg trust /bootstrap/public-key
|
||||
- diff /bootstrap/postinstall.sh /bootstrap/postinstall.sh.bkp || echo Done
|
||||
- - /bin/bash "${SPACK_ARTIFACTS_ROOT}/postinstall.sh" -fg
|
||||
- spack config --scope site add "packages:all:target:\"target=${SPACK_TARGET_ARCH}\""
|
||||
- spack config --scope site add "packages:all:target:[${SPACK_TARGET_ARCH}]"
|
||||
- signing-job:
|
||||
before_script:
|
||||
# Do not distribute Intel & ARM binaries
|
||||
|
||||
@@ -48,7 +48,7 @@ spack:
|
||||
- spack gpg trust /bootstrap/public-key
|
||||
- diff /bootstrap/postinstall.sh /bootstrap/postinstall.sh.bkp || echo Done
|
||||
- - /bin/bash "${SPACK_ARTIFACTS_ROOT}/postinstall.sh" -fg
|
||||
- spack config --scope site add "packages:all:target:\"target=${SPACK_TARGET_ARCH}\""
|
||||
- spack config --scope site add "packages:all:target:[${SPACK_TARGET_ARCH}]"
|
||||
- signing-job:
|
||||
before_script:
|
||||
# Do not distribute Intel & ARM binaries
|
||||
|
||||
@@ -60,9 +60,9 @@ spack:
|
||||
- [$^paraview_specs]
|
||||
- - ^hdf5@1.14 # Non-VisIt can build HDF5 1.14
|
||||
# Test ParaView builds with differnt GL backends
|
||||
- matrix:
|
||||
- [$sdk_base_spec]
|
||||
- [$^visit_specs]
|
||||
# - matrix:
|
||||
# - [$sdk_base_spec]
|
||||
# - [$^visit_specs]
|
||||
|
||||
mirrors: { "mirror": "s3://spack-binaries/develop/data-vis-sdk" }
|
||||
|
||||
|
||||
@@ -121,9 +121,7 @@ spack:
|
||||
- py-jupyterhub
|
||||
- py-libensemble +mpi +nlopt
|
||||
- py-petsc4py
|
||||
- py-warpx ^warpx dims=2
|
||||
- py-warpx ^warpx dims=3
|
||||
- py-warpx ^warpx dims=rz
|
||||
- py-warpx
|
||||
- qthreads scheduler=distrib
|
||||
- quantum-espresso
|
||||
- raja
|
||||
@@ -163,7 +161,7 @@ spack:
|
||||
- hpx max_cpu_count=512 +cuda
|
||||
- hypre +cuda
|
||||
- kokkos +wrapper +cuda
|
||||
- kokkos-kernels +cuda ^kokkos +wrapper +cuda
|
||||
- kokkos-kernels +cuda ^kokkos +wrapper +cuda +cuda_lambda
|
||||
- magma +cuda
|
||||
- mfem +cuda
|
||||
- mgard +serial +openmp +timing +unstructured +cuda
|
||||
@@ -187,6 +185,7 @@ spack:
|
||||
# - archer # llvm@8
|
||||
# - bricks # bricks
|
||||
# - geopm # geopm
|
||||
# - hdf5-vol-daos # hdf5-vol-daos: vhost/vhost_user.c:65:32: error: array size missing in 'vhost_message_handlers'
|
||||
# - loki # loki
|
||||
# - precice # precice
|
||||
# - pruners-ninja # pruners-ninja
|
||||
|
||||
@@ -128,9 +128,7 @@ spack:
|
||||
- py-jupyterhub
|
||||
- py-libensemble +mpi +nlopt
|
||||
- py-petsc4py
|
||||
- py-warpx ^warpx dims=2
|
||||
- py-warpx ^warpx dims=3
|
||||
- py-warpx ^warpx dims=rz
|
||||
- py-warpx
|
||||
- qthreads scheduler=distrib
|
||||
- quantum-espresso
|
||||
- raja
|
||||
@@ -175,7 +173,7 @@ spack:
|
||||
- hpx max_cpu_count=512 +cuda
|
||||
- hypre +cuda
|
||||
- kokkos +wrapper +cuda
|
||||
- kokkos-kernels +cuda ^kokkos +wrapper +cuda
|
||||
- kokkos-kernels +cuda ^kokkos +wrapper +cuda +cuda_lambda
|
||||
- magma +cuda
|
||||
- mfem +cuda
|
||||
- mgard +serial +openmp +timing +unstructured +cuda
|
||||
@@ -225,13 +223,14 @@ spack:
|
||||
- upcxx +rocm
|
||||
|
||||
# CPU failures
|
||||
#- geopm # /usr/include/x86_64-linux-gnu/bits/string_fortified.h:95:10: error:'__builtin_strncpy' specified bound 512 equals destination size [-Werror=stringop-truncation]
|
||||
#- loki # ../include/loki/Singleton.h:158:14: warning: 'template<class> class std::auto_ptr' is deprecated: use 'std::unique_ptr' instead [-Wdeprecated-declarations]
|
||||
#- pruners-ninja # test/ninja_test_util.c:34: multiple definition of `a';
|
||||
#- rempi # rempi_message_manager.h:53:3: error: 'string' does not name a type
|
||||
# - geopm # /usr/include/x86_64-linux-gnu/bits/string_fortified.h:95:10: error:'__builtin_strncpy' specified bound 512 equals destination size [-Werror=stringop-truncation]
|
||||
# - hdf5-vol-daos # hdf5-vol-daos: vhost/vhost_user.c:65:32: error: array size missing in 'vhost_message_handlers'
|
||||
# - loki # ../include/loki/Singleton.h:158:14: warning: 'template<class> class std::auto_ptr' is deprecated: use 'std::unique_ptr' instead [-Wdeprecated-declarations]
|
||||
# - pruners-ninja # test/ninja_test_util.c:34: multiple definition of `a';
|
||||
# - rempi # rempi_message_manager.h:53:3: error: 'string' does not name a type
|
||||
|
||||
# CUDA failures
|
||||
#- parsec +cuda # parsec/mca/device/cuda/transfer.c:168: multiple definition of `parsec_CUDA_d2h_max_flows';
|
||||
# - parsec +cuda # parsec/mca/device/cuda/transfer.c:168: multiple definition of `parsec_CUDA_d2h_max_flows';
|
||||
|
||||
mirrors: { "mirror": "s3://spack-binaries/develop/e4s" }
|
||||
|
||||
|
||||
@@ -46,6 +46,10 @@ if ($null -eq $Env:EDITOR)
|
||||
$Env:EDITOR = "notepad"
|
||||
}
|
||||
|
||||
# Set spack shell so we can detect powershell context
|
||||
$Env:SPACK_SHELL="pwsh"
|
||||
|
||||
doskey /exename=powershell.exe spack=$Env:SPACK_ROOT\bin\spack.ps1 $args
|
||||
|
||||
Write-Output "*****************************************************************"
|
||||
Write-Output "**************** Spack Package Manager **************************"
|
||||
|
||||
@@ -498,7 +498,7 @@ _spack_buildcache() {
_spack_buildcache_push() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -r --rel -f --force -u --unsigned -a --allow-root -k --key -d --directory -m --mirror-name --mirror-url --update-index --rebuild-index --spec-file --only"
        SPACK_COMPREPLY="-h --help -f --force -u --unsigned -a --allow-root -k --key --update-index --rebuild-index --spec-file --only"
    else
        _mirrors
    fi
@@ -507,7 +507,7 @@ _spack_buildcache_push() {
_spack_buildcache_create() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -r --rel -f --force -u --unsigned -a --allow-root -k --key -d --directory -m --mirror-name --mirror-url --update-index --rebuild-index --spec-file --only"
        SPACK_COMPREPLY="-h --help -f --force -u --unsigned -a --allow-root -k --key --update-index --rebuild-index --spec-file --only"
    else
        _mirrors
    fi
@@ -516,7 +516,7 @@ _spack_buildcache_create() {
_spack_buildcache_install() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -f --force -m --multiple -a --allow-root -u --unsigned -o --otherarch"
        SPACK_COMPREPLY="-h --help -f --force -m --multiple -u --unsigned -o --otherarch"
    else
        _all_packages
    fi
@@ -563,7 +563,7 @@ _spack_buildcache_save_specfile() {
_spack_buildcache_sync() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help --manifest-glob --src-directory --src-mirror-name --src-mirror-url --dest-directory --dest-mirror-name --dest-mirror-url"
        SPACK_COMPREPLY="-h --help --manifest-glob"
    else
        SPACK_COMPREPLY=""
    fi
@@ -572,7 +572,7 @@ _spack_buildcache_sync() {
_spack_buildcache_update_index() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -d --directory -m --mirror-name --mirror-url -k --keys"
        SPACK_COMPREPLY="-h --help -k --keys"
    else
        _mirrors
    fi
@@ -581,7 +581,7 @@ _spack_buildcache_update_index() {
_spack_buildcache_rebuild_index() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -d --directory -m --mirror-name --mirror-url -k --keys"
        SPACK_COMPREPLY="-h --help -k --keys"
    else
        _mirrors
    fi
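(A sketch inferred from the completions above: the separate -d/--directory, -m/--mirror-name and --mirror-url flags give way to a single positional mirror argument, which the else branch now completes via _mirrors.)

    # hypothetical invocations under the simplified interface
    spack buildcache push my-mirror zlib        # formerly: --mirror-name my-mirror zlib
    spack buildcache update-index my-mirror     # formerly: -m my-mirror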
@@ -935,7 +935,7 @@ _spack_env() {
_spack_env_activate() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help --sh --csh --fish --bat -v --with-view -V --without-view -p --prompt --temp -d --dir"
        SPACK_COMPREPLY="-h --help --sh --csh --fish --bat --pwsh -v --with-view -V --without-view -p --prompt --temp -d --dir"
    else
        _environments
    fi
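(A sketch, assuming the new --pwsh flag mirrors the existing shell selectors: it emits activation commands in PowerShell syntax, matching the SPACK_SHELL="pwsh" setup earlier in this diff.)

    spack env activate --pwsh -d .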
@@ -1695,7 +1695,7 @@ _spack_restage() {
_spack_solve() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help --show -l --long -L --very-long -I --install-status -y --yaml -j --json -c --cover -N --namespaces -t --types --timers --stats -U --fresh --reuse --reuse-deps"
        SPACK_COMPREPLY="-h --help --show -l --long -L --very-long -I --install-status --no-install-status -y --yaml -j --json -c --cover -N --namespaces -t --types --timers --stats -U --fresh --reuse --reuse-deps"
    else
        _all_packages
    fi
@@ -1704,7 +1704,7 @@ _spack_solve() {
_spack_spec() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -l --long -L --very-long -I --install-status -y --yaml -j --json --format -c --cover -N --namespaces -t --types -U --fresh --reuse --reuse-deps"
        SPACK_COMPREPLY="-h --help -l --long -L --very-long -I --install-status --no-install-status -y --yaml -j --json --format -c --cover -N --namespaces -t --types -U --fresh --reuse --reuse-deps"
    else
        _all_packages
    fi
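(A sketch: --no-install-status reads as the inverse of -I/--install-status, letting scripts force the install-status markers off for both commands.)

    spack spec --no-install-status hdf5
    spack solve --no-install-status hdf5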
@@ -37,7 +37,7 @@ RUN find -L {{ paths.view }}/* -type f -exec readlink -f '{}' \; | \

# Modifications to the environment that are necessary to run
RUN cd {{ paths.environment }} && \
    spack env activate --sh -d . >> /etc/profile.d/z10_spack_environment.sh
    spack env activate --sh -d . > activate.sh

{% if extra_instructions.build %}
{{ extra_instructions.build }}
@@ -53,7 +53,13 @@ COPY --from=builder {{ paths.environment }} {{ paths.environment }}
COPY --from=builder {{ paths.store }} {{ paths.store }}
COPY --from=builder {{ paths.hidden_view }} {{ paths.hidden_view }}
COPY --from=builder {{ paths.view }} {{ paths.view }}
COPY --from=builder /etc/profile.d/z10_spack_environment.sh /etc/profile.d/z10_spack_environment.sh

RUN { \
      echo '#!/bin/sh' \
      && echo '.' {{ paths.environment }}/activate.sh \
      && echo 'exec "$@"'; \
    } > /entrypoint.sh \
    && chmod a+x /entrypoint.sh
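(A sketch of the file the RUN block above generates, assuming {{ paths.environment }} renders to /opt/spack-environment; the image name below is a placeholder.)

    # /entrypoint.sh as written by the echo pipeline:
    #   #!/bin/sh
    #   . /opt/spack-environment/activate.sh
    #   exec "$@"
    # so any command now runs inside the activated environment:
    docker run --rm my-spack-image spack find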
{% block final_stage %}

@@ -70,6 +76,6 @@ RUN {% if os_package_update %}{{ os_packages_final.update }} \
{% for label, value in labels.items() %}
LABEL "{{ label }}"="{{ value }}"
{% endfor %}
ENTRYPOINT ["/bin/bash", "--rcfile", "/etc/profile", "-l", "-c", "$*", "--" ]
ENTRYPOINT [ "/entrypoint.sh" ]
CMD [ "/bin/bash" ]
{% endif %}
@@ -84,6 +84,10 @@ setenv("{{ cmd.name }}", "{{ cmd.value }}")
unsetenv("{{ cmd.name }}")
{% endif %}
{% endfor %}
{# Make sure system man pages are enabled by appending trailing delimiter to MANPATH #}
{% if has_manpath_modifications %}
append_path("MANPATH", "", ":")
{% endif %}
{% endblock %}

{% block footer %}
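(A sketch of why the empty append matters: a trailing ":" leaves an empty component in MANPATH, which man(1) replaces with its built-in default search path.)

    export MANPATH=/spack/view/share/man:
    manpath   # view man dir first, then the system defaults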
@@ -26,9 +26,17 @@ proc ModulesHelp { } {
{% endblock %}

{% block autoloads %}
{% if autoload|length > 0 %}
if {![info exists ::env(LMOD_VERSION_MAJOR)]} {
{% for module in autoload %}
module load {{ module }}
    module load {{ module }}
{% endfor %}
} else {
{% for module in autoload %}
    depends-on {{ module }}
{% endfor %}
}
{% endif %}
{% endblock %}
{# #}
{% block prerequisite %}
@@ -58,6 +66,10 @@ unsetenv {{ cmd.name }}
{% endif %}
{# #}
{% endfor %}
{# Make sure system man pages are enabled by appending trailing delimiter to MANPATH #}
{% if has_manpath_modifications %}
append-path --delim ":" MANPATH ""
{% endif %}
{% endblock %}

{% block footer %}
@@ -0,0 +1,16 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class ModuleManpathAppend(Package):
    homepage = "http://www.llnl.gov"
    url = "http://www.llnl.gov/module-manpath-append-1.0.tar.gz"

    version("1.0", "0123456789abcdef0123456789abcdef")

    def setup_run_environment(self, env):
        env.append_path("MANPATH", "/path/to/man")
@@ -0,0 +1,17 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class ModuleManpathPrepend(Package):
    homepage = "http://www.llnl.gov"
    url = "http://www.llnl.gov/module-manpath-prepend-1.0.tar.gz"

    version("1.0", "0123456789abcdef0123456789abcdef")

    def setup_run_environment(self, env):
        env.prepend_path("MANPATH", "/path/to/man")
        env.prepend_path("MANPATH", "/path/to/share/man")
@@ -0,0 +1,16 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class ModuleManpathSetenv(Package):
    homepage = "http://www.llnl.gov"
    url = "http://www.llnl.gov/module-manpath-setenv-1.0.tar.gz"

    version("1.0", "0123456789abcdef0123456789abcdef")

    def setup_run_environment(self, env):
        env.set("MANPATH", "/path/to/man")
@@ -0,0 +1,16 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class ModuleSetenvRaw(Package):
    homepage = "http://www.llnl.gov"
    url = "http://www.llnl.gov/module-setenv-raw-1.0.tar.gz"

    version("1.0", "0123456789abcdef0123456789abcdef")

    def setup_run_environment(self, env):
        env.set("FOO", "{{name}}, {name}, {{}}, {}", raw=True)
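(Illustration only: these mock packages live in Spack's test repository, so the command below is hypothetical. With raw=True the braces should survive literally into the generated module file, e.g. a Tcl module containing: setenv FOO "{{name}}, {name}, {{}}, {}".)

    spack module tcl refresh -y module-setenv-raw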
@@ -27,7 +27,8 @@ class Abinit(AutotoolsPackage):
    homepage = "https://www.abinit.org/"
    url = "https://www.abinit.org/sites/default/files/packages/abinit-8.6.3.tar.gz"

    version("9.8.3", sha256="65fb93217336a72d1554cc6991127203958cc7df59921782251a86569e33a357")
    version("9.8.4", sha256="a086d5045f0093b432e6a044d5f71f7edf5a41a62d67b3677cb0751d330c564a")
    version("9.8.3", sha256="de823878aea2c20098f177524fbb4b60de9b1b5971b2e835ec244dfa3724589b")
    version("9.6.1", sha256="b6a12760fd728eb4aacca431ae12150609565bedbaa89763f219fcd869f79ac6")
    version("9.4.2", sha256="d40886f5c8b138bb4aa1ca05da23388eb70a682790cfe5020ecce4db1b1a76bc")
    version("8.10.3", sha256="ed626424b4472b93256622fbb9c7645fa3ffb693d4b444b07d488771ea7eaa75")
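(A sketch: sha256 corrections like the 9.8.3 entry above are typically verified by re-fetching and hashing the upstream tarballs.)

    spack checksum abinit 9.8.3 9.8.4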
@@ -70,40 +70,6 @@
            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-04-1/arm-compiler-for-linux_23.04.1_AmazonLinux-2023_aarch64.tar",
        ),
    },
    "23.04": {
        "RHEL-7": (
            "6526218484e87c195c1145f60536552fabbd25ba98c05cf096f54de18381a422",
            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-04/arm-compiler-for-linux_23.04_RHEL-7_aarch64.tar",
        ),
        "RHEL-8": (
            "e658c9d85693cc818f2be9942d8aa71465a84e00046d6f8da72c46a76cc8a747",
            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-04/arm-compiler-for-linux_23.04_RHEL-8_aarch64.tar",
        ),
        "RHEL-9": (
            "b71431a16e09ae910737f920aab9c720b5ec83586dba8041b0daa45fa13521d1",
            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-04/arm-compiler-for-linux_23.04_RHEL-9_aarch64.tar",
        ),
        "SLES-15": (
            "5dc880272942f5ac2cad7556bdbdf177b62a0736061c1acb1c80ca51ccaba3be",
            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-04/arm-compiler-for-linux_23.04_SLES-15_aarch64.tar",
        ),
        "Ubuntu-20.04": (
            "a0b3bcec541a1e78b1a48d6fa876cc0ef2846f40219c95c60ab9852882ee05d2",
            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-04/arm-compiler-for-linux_23.04_Ubuntu-20.04_aarch64.tar",
        ),
        "Ubuntu-22.04": (
            "10cf29da14830b3a9f0f51cda893e4255ffd1093297a71886865f97958d100f7",
            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-04/arm-compiler-for-linux_23.04_Ubuntu-22.04_aarch64.tar",
        ),
        "AmazonLinux-2": (
            "65637a34abd076906bcbd56f2a7861ec873bc8d62e321217ade6008939a0bf6b",
            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-04/arm-compiler-for-linux_23.04_AmazonLinux-2_aarch64.tar",
        ),
        "AmazonLinux-2023": (
            "415f8e908baf550e92ef21d4146904fac0a339132cb7921b4046e47ac71cf4c9",
            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-04/arm-compiler-for-linux_23.04_AmazonLinux-2023_aarch64.tar",
        ),
    },
    "22.1": {
        "RHEL-7": (
            "367b9a60fa13b5fcf2fa787122c12d4bfb14d6f3e3e7b0460efc7627484a56a4",
@@ -391,4 +391,8 @@ def plugin_cmake_variant(plugin_name, spack_variant):
        cxxstd = spec["root"].variants["cxxstd"].value
        args.append("-DCMAKE_CXX_STANDARD={0}".format(cxxstd))

        if "+python" in spec:
            python = spec["python"].command.path
            args.append("-DPython_EXECUTABLE={0}".format(python))

        return args
@@ -16,6 +16,7 @@ class Alglib(MakefilePackage):
    homepage = "https://www.alglib.net/"
    url = "https://www.alglib.net/translator/re/alglib-3.11.0.cpp.gpl.tgz"

    version("4.00.0", sha256="827b5f559713a3e8c7c1452ed1ffd5227adb9622d1a165ceb70c117c8ed3ccb4")
    version("3.20.0", sha256="e7357f0f894313ff1b640ec9cb5e8b63f06d2d3411c2143a374aa0e9740da8a9")
    version("3.11.0", sha256="34e391594aac89fb354bdaf58c42849489cd1199197398ba98bb69961f42bdb0")
@@ -27,6 +27,7 @@ class Apptainer(SingularityBase):
    git = "https://github.com/apptainer/apptainer.git"

    version("main", branch="main")
    version("1.1.9", sha256="c615777539154288542cf393d3fd44c04ccb3260bc6330dc324d4e4ebe902bfa")
    version("1.1.7", sha256="e6d3956a26c3965703402e17f153ba07f59bf710068806462b314d2d04e825e7")
    version("1.1.6", sha256="5f32d305279a51ce8bdbe69e733c4ac12b1efdcb77758fab8ec9463e96a8fd82")
    version("1.1.5", sha256="3eadb26b6656a89a111abe29c7e50eab0023e9a8718f1e77e46ca871398bfa67")
@@ -35,6 +36,7 @@ class Apptainer(SingularityBase):
    version("1.0.2", sha256="2d7a9d0a76d5574459d249c3415e21423980d9154ce85e8c34b0600782a7dfd3")

    depends_on("go@1.17.5:", when="@1.1.0:")
    depends_on("squashfuse", type="run")

    singularity_org = "apptainer"
    singularity_name = "apptainer"
@@ -80,50 +80,6 @@
        "RHEL-7": ("7b2239b2ce5315e1be14dbd8fe15aff2d3b07968d64b5c80c8ab57140b6a17a8"),
        "AmazonLinux-2": ("a2e0f176df627c50f851924ac57994f582f63b0f3d42ad0b65c915ea04dc0467"),
    },
    "23.04_gcc-12.2": {
        "RHEL-7": ("e159f84f14d885aa5e47ca17c16ef3d95128f834a655827bf6b48fcf8d6ec459"),
        "RHEL-8": ("6ac1974ec9bd814d3a4eecf330cefd67cf2c878f026a8b04bc2928368948671a"),
        "RHEL-9": ("71fc8ac9e48ec531b24d5641481070daa8561a5c0373dfbd8915847c8b45641c"),
        "SLES-15": ("b26a22123294c161519dce398869626e6a793ce94b76648cce46b0111d17d215"),
        "Ubuntu-20.04": ("c410e1aebb1465f8ae7b264fa74f847137e3c1d9f28c1a9332bf69c59b6f9eec"),
        "Ubuntu-22.04": ("9f9179336d14ea253462701c96c13d0b01b7c8a09a3fb987c0b22ad647598410"),
        "AmazonLinux-2": ("3ef5556d7ac5a269af432eb24ed25d2504bcd0070f8b899231f2a45cea6f1a1d"),
        "AmazonLinux-2023": ("037c70ffdcbdf152ddcb7a4f9e1e1acb7c6cd7ea7337966a2b101e684493653d"),
    },
    "23.04_gcc-11.3": {
        "RHEL-7": ("75f53d160c2254db38f670e86b5381b6d4d778cb0325bfc6fed772c1a8f4c7d7"),
        "RHEL-8": ("3d10bd5d3b82a2affcea94fdb235eb046c622abf82306682aabdf3abd9e8fc4e"),
        "RHEL-9": ("df3a7c3571af2e6c3e071cf88afdc3352cd32428c22859d833e68afb3d39b4de"),
        "SLES-15": ("6dda1320be97d4e5bd2f192b8c6743a6975aab17499554d5506506a620f27e04"),
        "Ubuntu-20.04": ("57cc91b3b43fd663f827f5a2f3e2b3bddf76ea0f80d95e227a95d961fae64b5d"),
        "Ubuntu-22.04": ("aa56ea09ded38e17bf26ea449b5d45fb112b0feb34a5408c629fe030011c83a4"),
        "AmazonLinux-2": ("26494f69237eaf123356b41265f6531d8049dd7aa04898e7b3cd3e8d67d2f25a"),
        "AmazonLinux-2023": ("30a1a7d14b567c2e68b11cf40890d09899d4cb5fdc3605b9a0c271867ff0f4b9"),
    },
    "23.04_gcc-10.2": {
        "RHEL-7": ("08bb8a78dc27989e044f5154744355699e2896d60ae8838b0e6ce4318a9462ab"),
        "RHEL-8": ("29415b6d6bce9176aa372ac794769541775573f8302e801a4bf5cdfad5b07851"),
        "SLES-15": ("aa3ea267a4d02a9c01fa9ef9fd5ac9e0b8687bac8b19047b52ea57ca587ee838"),
        "Ubuntu-20.04": ("d68a66d85e4cd7e99b66308fe563a623b902e49cb01a2ba0342f575beb192a55"),
        "AmazonLinux-2": ("a9eded299ea08ba32c91b6e99e1fe24240fcbee0a60696d1dba6d20b2bb39d5c"),
    },
    "23.04_gcc-9.3": {
        "RHEL-7": ("0550fafeecfd8a23500f8fdd3cddf49ed574ae274894f2ca7935a69618dbd418"),
        "RHEL-8": ("d041b3b8ffd4752f0180999bdfc3c6764659097f0b042482bc1058d4c9a504cb"),
        "SLES-15": ("dcf8107423dc54068765a3e5f39bfed18df1c9662743f93c785554d0a223a727"),
        "Ubuntu-20.04": ("577dc2ceb34f1e0aaf3e28953ed406de02e3e245eeafd7f9d6666866509fb1a1"),
        "AmazonLinux-2": ("50df0ee6067c2d3b888545beaa34936bb3b8a8736d52535badf2ab48acc866e2"),
    },
    "23.04_gcc-8.2": {
        "RHEL-7": ("a579d3790c60ff05952cf4f778089843555413686037ab8b743c2756df8c4070"),
        "RHEL-8": ("6c90eb65cdd472a16eaebbcd7455840048814ced5986f31a0daaf10f9a2d33fd"),
        "SLES-15": ("9c3a8323f47db78128751ae74fb3dea11b6237d8433aae0c6e2c8e7951c3ea00"),
        "AmazonLinux-2": ("02819c958976530e6363d629c5b6d5097892eef5ee4904e27b25e18f52e08c17"),
    },
    "23.04_gcc-7.5": {
        "RHEL-7": ("2141340617d9a053ed03f9da5b1a787bc96bf065eceee3bf350184a4d3fd8a44"),
        "AmazonLinux-2": ("27dfc5f52e6c44a84d99b6d0841e526ea9ff5878262094760c3c82a6863eceb4"),
    },
    "22.1_gcc-11.2": {
        "RHEL-7": ("9ce7858525109cca8f4e1d533113b6410d55f10cc4db16c4742562da87a32f2b"),
        "RHEL-8": ("24f9f4496e41c2314d4ace25b6e3d63127bd586ff7bdd8a732471cbc65a8023e"),
@@ -254,7 +210,7 @@ class ArmplGcc(Package):
    high-performance computing applications on Arm processors."""

    homepage = "https://developer.arm.com/tools-and-software/server-and-hpc/downloads/arm-performance-libraries"
    url = "https://developer.arm.com/-/media/Files/downloads/hpc/arm-performance-libraries/22-1/ubuntu-20/arm-performance-libraries_22.1_Ubuntu-20.04_gcc-11.2.tar"
    url = "https://developer.arm.com/-/media/Files/downloads/hpc/arm-performance-libraries/23-04-1/ubuntu-22/arm-performance-libraries_23.04.1_Ubuntu-22.04_gcc-12.2.tar"

    maintainers("annop-w")

@@ -276,13 +232,6 @@ class ArmplGcc(Package):
    conflicts("%gcc@:7", when="@23.04.1_gcc-8.2")
    conflicts("%gcc@:6", when="@23.04.1_gcc-7.5")

    conflicts("%gcc@:11", when="@23.04_gcc-12.2")
    conflicts("%gcc@:10", when="@23.04_gcc-11.3")
    conflicts("%gcc@:9", when="@23.04_gcc-10.2")
    conflicts("%gcc@:8", when="@23.04_gcc-9.3")
    conflicts("%gcc@:7", when="@23.04_gcc-8.2")
    conflicts("%gcc@:6", when="@23.04_gcc-7.5")

    conflicts("%gcc@:10", when="@22.1_gcc-11.2")
    conflicts("%gcc@:9", when="@22.1_gcc-10.2")
    conflicts("%gcc@:8", when="@22.1_gcc-9.3")
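(A sketch: the retained conflicts still pin each armpl-gcc build to a matching compiler, e.g. the gcc-12.2 flavour rejects gcc 11 and older.)

    spack install armpl-gcc@23.04.1_gcc-12.2 %gcc@12.2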
@@ -90,6 +90,7 @@ class Ascent(CMakePackage, CudaPackage):
    variant("dray", default=False, description="Build with Devil Ray support")
    variant("adios2", default=False, description="Build Adios2 filter support")
    variant("fides", default=False, description="Build Fides filter support")
    variant("occa", default=False, description="Build with OCCA support")

    # caliper
    variant("caliper", default=False, description="Build Caliper support")
@@ -213,6 +214,9 @@ class Ascent(CMakePackage, CudaPackage):
    depends_on("mfem+shared", when="+mfem+shared")
    depends_on("mfem~shared", when="+mfem~shared")

    # occa
    depends_on("occa", when="+occa")

    # fides
    depends_on("fides", when="+fides")

@@ -615,6 +619,15 @@ def hostconfig(self):
        else:
            cfg.write("# mfem not built by spack \n")

        #######################
        # OCCA
        #######################
        if "+occa" in spec:
            cfg.write("# occa from spack \n")
            cfg.write(cmake_cache_entry("OCCA_DIR", spec["occa"].prefix))
        else:
            cfg.write("# occa not built by spack \n")

        #######################
        # Devil Ray
        #######################
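(A sketch: the new variants wire straight through to the generated host-config, which gains an OCCA_DIR cache entry when +occa is set.)

    spack install ascent+occa+fides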
Some files were not shown because too many files have changed in this diff.