Compare commits: develop-20...develop-20 (257 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 783bbdf2db |  |
|  | a65f13f79f |  |
|  | fc391d5332 |  |
|  | e05f12f18e |  |
|  | 14f248652c |  |
|  | 87f99de3fb |  |
|  | 9e7fe04a77 |  |
|  | 45d149c7d3 |  |
|  | 8250a085b0 |  |
|  | 39b9f214a8 |  |
|  | 7631b5ea14 |  |
|  | a4d2f8332f |  |
|  | 007f02e06a |  |
|  | 8ec1657136 |  |
|  | c5fc794d77 |  |
|  | 5c409f794a |  |
|  | 06b30842e4 |  |
|  | ebbe63013d |  |
|  | 3f7f10ca2b |  |
|  | 6a5a074150 |  |
|  | c046c61cff |  |
|  | 7bd95f6ad3 |  |
|  | 4429e17db0 |  |
|  | 65dd6e1196 |  |
|  | a43da48d23 |  |
|  | f9c06669ca |  |
|  | 11c6431c9a |  |
|  | 1e85a1b227 |  |
|  | b81aa42179 |  |
|  | c59f68a33d |  |
|  | 743a93902d |  |
|  | 5bc5139552 |  |
|  | 3be450c16f |  |
|  | c733fe9c34 |  |
|  | e2edb45d2c |  |
|  | b2a95fb4b7 |  |
|  | 7bf7a266ba |  |
|  | 2341074694 |  |
|  | 1c0dbab821 |  |
|  | 865c8b606c |  |
|  | c98afbc44c |  |
|  | 57cd822fb7 |  |
|  | 627c2d3bf6 |  |
|  | 3b1b261cd8 |  |
|  | 40c4c81c19 |  |
|  | 642451e047 |  |
|  | 6630ddb47d |  |
|  | 7fd7d0b9fd |  |
|  | f7d71ec792 |  |
|  | d80bc70481 |  |
|  | 81cfe39ae3 |  |
|  | ed058fd212 |  |
|  | 1da12490fa |  |
|  | 8b5b4ade0e |  |
|  | 12bc4cf093 |  |
|  | f8676db7f4 |  |
|  | dd747c5c48 |  |
|  | cf031e83f0 |  |
|  | f709518916 |  |
|  | aa9eb33108 |  |
|  | 818c9aeb5a |  |
|  | cfdf19ed6b |  |
|  | 566754440f |  |
|  | f0658243c0 |  |
|  | 06b6b05dbd |  |
|  | 189cd59d13 |  |
|  | 5a43f4ba55 |  |
|  | 29aa7117f4 |  |
|  | d367b4285a |  |
|  | 260e735425 |  |
|  | ca872f9c34 |  |
|  | b72a268bc5 |  |
|  | 818195a3bd |  |
|  | 679d41ea66 |  |
|  | 86216cc36e |  |
|  | ecb7ad493f |  |
|  | fb1e81657c |  |
|  | 34e4c62e8c |  |
|  | acb02326aa |  |
|  | c1756257c2 |  |
|  | 1ee7c735ec |  |
|  | 22deed708e |  |
|  | 6693dc5eb8 |  |
|  | 396f219011 |  |
|  | a3ecd7efed |  |
|  | ae5511afd6 |  |
|  | 78fe2c63fa |  |
|  | f4f396745e |  |
|  | f3c6d892b1 |  |
|  | 2f5988cec7 |  |
|  | 44922f734d |  |
|  | 144e657c58 |  |
|  | 6f48fe2b6f |  |
|  | fcd03adc02 |  |
|  | 0620b954be |  |
|  | 6174b829f7 |  |
|  | 0d4b1c6a73 |  |
|  | fb9797bd67 |  |
|  | 4eee3c12c1 |  |
|  | 3e5f9a2138 |  |
|  | 8295a45999 |  |
|  | 5138c71d34 |  |
|  | eef9939c21 |  |
|  | ffddaabaa0 |  |
|  | f664d1edaa |  |
|  | 6d5d1562bd |  |
|  | 70c71e8f93 |  |
|  | d9d1eb24f9 |  |
|  | cef59ad0bf |  |
|  | a1e117a98b |  |
|  | cb855d5ffd |  |
|  | 3866ff0096 |  |
|  | 6dc167e43d |  |
|  | 0fd085be8e |  |
|  | 74fba221f1 |  |
|  | deeeb86067 |  |
|  | 98daf5b7ec |  |
|  | 8a3d98b632 |  |
|  | 0cc945b367 |  |
|  | e732155e8c |  |
|  | c07fb833a9 |  |
|  | 7d566b481f |  |
|  | a72e5e762e |  |
|  | 0eb22ef770 |  |
|  | 95f78440f1 |  |
|  | 74a51aba50 |  |
|  | b370ecfbda |  |
|  | 04d55b7600 |  |
|  | d695438851 |  |
|  | f0447d63ad |  |
|  | e8a7a04f14 |  |
|  | 23316f0352 |  |
|  | b3433cb872 |  |
|  | 349ba83bc6 |  |
|  | ecfd9ef12b |  |
|  | 4502351659 |  |
|  | 8a08f09ac0 |  |
|  | 60ecd0374e |  |
|  | 52ccee79d8 |  |
|  | 7f0f1b63d6 |  |
|  | b65f1f22ec |  |
|  | e9efa1df75 |  |
|  | 884a5b8b07 |  |
|  | 91d674f5d0 |  |
|  | 76fbb8cd8f |  |
|  | 0f3f2a8024 |  |
|  | 5a5f774369 |  |
|  | f5212ae139 |  |
|  | 4b618704bf |  |
|  | 46285d9725 |  |
|  | 36852fe348 |  |
|  | 8914d26867 |  |
|  | fdea5e7624 |  |
|  | ca1e4d54b5 |  |
|  | 656528bbbb |  |
|  | 4d42e9d1f3 |  |
|  | d058c1d649 |  |
|  | 43854fc2ec |  |
|  | 6a2149df6e |  |
|  | af38d097ac |  |
|  | e67dca73d1 |  |
|  | 2e6ed1e707 |  |
|  | 53d2ffaf83 |  |
|  | a95e061fed |  |
|  | e01b9b38ef |  |
|  | eac15badd3 |  |
|  | 806b8aa966 |  |
|  | 9e5ca525f7 |  |
|  | 5ea4322f88 |  |
|  | 4ca2d8bc19 |  |
|  | e0059ef961 |  |
|  | 7d9fad9576 |  |
|  | 553277a84f |  |
|  | 00a3ebd0bb |  |
|  | ffc9060e11 |  |
|  | 31d5f56913 |  |
|  | bfdebae831 |  |
|  | aa83fa44e1 |  |
|  | e56291dd45 |  |
|  | 2f52545214 |  |
|  | 5090023e3a |  |
|  | d355880110 |  |
|  | 1a0434b808 |  |
|  | c3eec8a36f |  |
|  | 25b8cf93d2 |  |
|  | 34ff7605e6 |  |
|  | e026fd3613 |  |
|  | 3f5f4cfe26 |  |
|  | 74fe9ccef3 |  |
|  | fd5a8b2075 |  |
|  | 33793445cf |  |
|  | f4a144c8ac |  |
|  | 6c439ec022 |  |
|  | 209409189a |  |
|  | ff900566e0 |  |
|  | a954a0bb9f |  |
|  | c21e00f504 |  |
|  | 9ae1317e79 |  |
|  | 9f1a30d3b5 |  |
|  | 1340995249 |  |
|  | afebc11742 |  |
|  | 34e9fc612c |  |
|  | 1d8ff7f742 |  |
|  | 0e27f05611 |  |
|  | 19aaa97ff2 |  |
|  | 990309355f |  |
|  | 2cb66e6e44 |  |
|  | cfaade098a |  |
|  | ed65532e27 |  |
|  | 696d4a1b85 |  |
|  | 8def75b414 |  |
|  | 5389db821d |  |
|  | 0d5ae3a809 |  |
|  | b61ad8d2a8 |  |
|  | b35db020eb |  |
|  | ca1d15101e |  |
|  | c9ec5fb9ac |  |
|  | 71abb8c7f0 |  |
|  | 4dafae8d17 |  |
|  | b2b00df5cc |  |
|  | 114e5d4767 |  |
|  | fd70e7fb31 |  |
|  | 77760c8ea4 |  |
|  | 737a6dcc73 |  |
|  | 3826fe3765 |  |
|  | edb11941b2 |  |
|  | 1bd58a8026 |  |
|  | f8e0c8caed |  |
|  | d0412c1578 |  |
|  | ec500adb50 |  |
|  | 30f5c74614 |  |
|  | 713eb210ac |  |
|  | a022e45866 |  |
|  | 82685a68d9 |  |
|  | b19691d503 |  |
|  | 54ea860b37 |  |
|  | fb598baa53 |  |
|  | 02763e967a |  |
|  | 2846be315b |  |
|  | 4818b75814 |  |
|  | b613bf3855 |  |
|  | 3347372a7b |  |
|  | c417a77a19 |  |
|  | 90d0d0176c |  |
|  | 72b9f89504 |  |
|  | a89f1b1bf4 |  |
|  | c6e26251a1 |  |
|  | 190a1bf523 |  |
|  | e381e166ec |  |
|  | 2f145b2684 |  |
|  | 4c7748e954 |  |
|  | 86485dea14 |  |
|  | 00f8f5898a |  |
|  | f41d7a89f3 |  |
|  | 4f07205c63 |  |
|  | 08f9c7670e |  |
|  | b451791336 |  |
**.github/workflows/audit.yaml** (vendored; 2 changed lines)

```diff
@@ -22,7 +22,7 @@ jobs:
       matrix:
         operating_system: ["ubuntu-latest", "macos-latest"]
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
         with:
           python-version: ${{inputs.python_version}}
```
**.github/workflows/bootstrap.yml** (vendored; 22 changed lines)

```diff
@@ -24,7 +24,7 @@ jobs:
             make patch unzip which xz python3 python3-devel tree \
             cmake bison bison-devel libstdc++-static
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
             make patch unzip xz-utils python3 python3-dev tree \
             cmake bison
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
             bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
             make patch unzip xz-utils python3 python3-dev tree
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
             make patch unzip which xz python3 python3-devel tree \
             cmake bison
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
         with:
           fetch-depth: 0
       - name: Setup repo
@@ -158,7 +158,7 @@ jobs:
         run: |
           brew install cmake bison@2.7 tree
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
@@ -179,7 +179,7 @@ jobs:
         run: |
           brew install tree
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
       - name: Bootstrap clingo
         run: |
           set -ex
@@ -204,7 +204,7 @@ jobs:
     runs-on: ubuntu-20.04
     steps:
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
         with:
           fetch-depth: 0
       - name: Setup repo
@@ -247,7 +247,7 @@ jobs:
             bzip2 curl file g++ gcc patchelf gfortran git gzip \
             make patch unzip xz-utils python3 python3-dev tree
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
             make patch unzip xz-utils python3 python3-dev tree \
             gawk
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
```
**.github/workflows/build-containers.yml** (vendored; 6 changed lines)

```diff
@@ -56,7 +56,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2

       - name: Set Container Tag Normal (Nightly)
         run: |
@@ -86,7 +86,7 @@ jobs:
           fi

       - name: Upload Dockerfile
-        uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
+        uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
        with:
          name: dockerfiles
          path: dockerfiles
@@ -95,7 +95,7 @@ jobs:
         uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # @v1

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@4c0219f9ac95b02789c1075625400b2acbff50b1 # @v1
+        uses: docker/setup-buildx-action@885d1462b80bc1c1c7f0b00334ad271f09369c55 # @v1

       - name: Log in to GitHub Container Registry
         uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
```
**.github/workflows/ci.yaml** (vendored; 2 changed lines)

```diff
@@ -35,7 +35,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
         if: ${{ github.event_name == 'push' }}
         with:
           fetch-depth: 0
```
**.github/workflows/nightly-win-builds.yml** (vendored; 2 changed lines)

```diff
@@ -14,7 +14,7 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
         with:
           fetch-depth: 0
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
```
**.github/workflows/unit_tests.yaml** (vendored; 10 changed lines)

```diff
@@ -47,7 +47,7 @@ jobs:
         on_develop: false

     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
         with:
           fetch-depth: 0
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -94,7 +94,7 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
         with:
           fetch-depth: 0
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -133,7 +133,7 @@ jobs:
           dnf install -y \
               bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
               make patch tcl unzip which xz
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
       - name: Setup repo and non-root user
         run: |
           git --version
@@ -152,7 +152,7 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
         with:
           fetch-depth: 0
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -187,7 +187,7 @@ jobs:
       matrix:
         python-version: ["3.10"]
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
         with:
           fetch-depth: 0
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
```
**.github/workflows/valid-style.yml** (vendored; 6 changed lines)

```diff
@@ -18,7 +18,7 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
         with:
           python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
         with:
           fetch-depth: 0
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -68,7 +68,7 @@ jobs:
           dnf install -y \
               bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
               make patch tcl unzip which xz
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
       - name: Setup repo and non-root user
         run: |
           git --version
```
**.github/workflows/windows_python.yml** (vendored; 9 changed lines)

```diff
@@ -15,7 +15,7 @@ jobs:
   unit-tests:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
         with:
           fetch-depth: 0
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -39,7 +39,7 @@ jobs:
   unit-tests-cmd:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
         with:
           fetch-depth: 0
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -63,7 +63,7 @@ jobs:
   build-abseil:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
         with:
           fetch-depth: 0
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -75,6 +75,5 @@ jobs:
       - name: Build Test
         run: |
           spack compiler find
-          spack external find cmake
-          spack external find ninja
+          spack -d external find cmake ninja
           spack -d install abseil-cpp
```
**SECURITY.md** (32 changed lines)

```diff
@@ -2,24 +2,26 @@

 ## Supported Versions

-We provide security updates for the following releases.
+We provide security updates for `develop` and for the last two
+stable (`0.x`) release series of Spack. Security updates will be
+made available as patch (`0.x.1`, `0.x.2`, etc.) releases.

 For more on Spack's release structure, see
 [`README.md`](https://github.com/spack/spack#releases).

-| Version | Supported          |
-| ------- | ------------------ |
-| develop | :white_check_mark: |
-| 0.19.x  | :white_check_mark: |
-| 0.18.x  | :white_check_mark: |
-
 ## Reporting a Vulnerability

-To report a vulnerability or other security
-issue, email maintainers@spack.io.
+You can report a vulnerability using GitHub's private reporting
+feature:

-You can expect to hear back within two days.
-If your security issue is accepted, we will do
-our best to release a fix within a week. If
-fixing the issue will take longer than this,
-we will discuss timeline options with you.
+1. Go to [github.com/spack/spack/security](https://github.com/spack/spack/security).
+2. Click "Report a vulnerability" in the upper right corner of that page.
+3. Fill out the form and submit your draft security advisory.
+
+More details are available in
+[GitHub's docs](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability).
+
+You can expect to hear back about security issues within two days.
+If your security issue is accepted, we will do our best to release
+a fix within a week. If fixing the issue will take longer than
+this, we will discuss timeline options with you.
```
**[file name not captured]** (likely `bin/spack.bat`, the Windows batch wrapper)

```diff
@@ -14,7 +14,7 @@
 ::
 @echo off

-set spack=%SPACK_ROOT%\bin\spack
+set spack="%SPACK_ROOT%"\bin\spack

 ::#######################################################################
 :: This is a wrapper around the spack command that forwards calls to
```

The second hunk replaces the `for /f`-based argument tokenization (first block below) with direct `%1`/`shift` processing (second block); the capture lost the per-line add/remove markers, so the lines are shown in captured order:

```batch
@@ -51,65 +51,43 @@ setlocal enabledelayedexpansion
:: subcommands will never start with '-'
:: everything after the subcommand is an arg

:: we cannot allow batch "for" loop to directly process CL args
:: a number of batch reserved characters are commonly passed to
:: spack and allowing batch's "for" method to process the raw inputs
:: results in a large number of formatting issues
:: instead, treat the entire CLI as one string
:: and split by space manually
:: capture cl args in variable named cl_args
set cl_args=%*

:process_cl_args
rem tokens=1* returns the first processed token produced
rem by tokenizing the input string cl_args on spaces into
rem the named variable %%g
rem While this make look like a for loop, it only
rem executes a single time for each of the cl args
rem the actual iterative loop is performed by the
rem goto process_cl_args stanza
rem we are simply leveraging the "for" method's string
rem tokenization
for /f "tokens=1*" %%g in ("%cl_args%") do (
    set t=%%~g
    rem remainder of string is composed into %%h
    rem these are the cl args yet to be processed
    rem assign cl_args var to only the args to be processed
    rem effectively discarding the current arg %%g
    rem this will be nul when we have no further tokens to process
    set cl_args=%%h
    rem process the first space delineated cl arg
    rem of this iteration
    if "!t:~0,1!" == "-" (
        if defined _sp_subcommand (
            rem We already have a subcommand, processing args now
            if not defined _sp_args (
                set "_sp_args=!t!"
            ) else (
                set "_sp_args=!_sp_args! !t!"
            )
        ) else (
            if not defined _sp_flags (
                set "_sp_flags=!t!"
                shift
            ) else (
                set "_sp_flags=!_sp_flags! !t!"
                shift
            )
        )
    ) else if not defined _sp_subcommand (
        set "_sp_subcommand=!t!"
        shift
    ) else (

rem Set first cl argument (denoted by %1) to be processed
set t=%1
rem shift moves all cl positional arguments left by one
rem meaning %2 is now %1, this allows us to iterate over each
rem argument
shift
rem assign next "first" cl argument to cl_args, will be null when
rem there are now further arguments to process
set cl_args=%1
if "!t:~0,1!" == "-" (
    if defined _sp_subcommand (
        rem We already have a subcommand, processing args now
        if not defined _sp_args (
            set "_sp_args=!t!"
            shift
        ) else (
            set "_sp_args=!_sp_args! !t!"
            shift
        )
    ) else (
        if not defined _sp_flags (
            set "_sp_flags=!t!"
        ) else (
            set "_sp_flags=!_sp_flags! !t!"
        )
    )
) else if not defined _sp_subcommand (
    set "_sp_subcommand=!t!"
) else (
    if not defined _sp_args (
        set "_sp_args=!t!"
    ) else (
        set "_sp_args=!_sp_args! !t!"
    )
)
rem if this is not nil, we have more tokens to process
rem if this is not nu;ll, we have more tokens to process
rem start above process again with remaining unprocessed cl args
if defined cl_args goto :process_cl_args
```
**[file name not captured]** (likely `share/spack/setup-env.ps1`, the PowerShell wrapper)

```diff
@@ -39,12 +39,26 @@ function Read-SpackArgs {
     return $SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs
 }

+function Set-SpackEnv {
+    # This method is responsible
+    # for processing the return from $(spack <command>)
+    # which are returned as System.Object[]'s containing
+    # a list of env commands
+    # Invoke-Expression can only handle one command at a time
+    # so we iterate over the list to invoke the env modification
+    # expressions one at a time
+    foreach($envop in $args[0]){
+        Invoke-Expression $envop
+    }
+}
+
+
 function Invoke-SpackCD {
     if (Compare-CommonArgs $SpackSubCommandArgs) {
-        python $Env:SPACK_ROOT/bin/spack cd -h
+        python "$Env:SPACK_ROOT/bin/spack" cd -h
     }
     else {
-        $LOC = $(python $Env:SPACK_ROOT/bin/spack location $SpackSubCommandArgs)
+        $LOC = $(python "$Env:SPACK_ROOT/bin/spack" location $SpackSubCommandArgs)
         if (($NULL -ne $LOC)){
             if ( Test-Path -Path $LOC){
                 Set-Location $LOC
@@ -61,7 +75,7 @@ function Invoke-SpackCD

 function Invoke-SpackEnv {
     if (Compare-CommonArgs $SpackSubCommandArgs[0]) {
-        python $Env:SPACK_ROOT/bin/spack env -h
+        python "$Env:SPACK_ROOT/bin/spack" env -h
     }
     else {
         $SubCommandSubCommand = $SpackSubCommandArgs[0]
@@ -69,46 +83,46 @@ function Invoke-SpackEnv
         switch ($SubCommandSubCommand) {
             "activate" {
                 if (Compare-CommonArgs $SubCommandSubCommandArgs) {
-                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
+                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
                 }
                 elseif ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
-                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
+                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
                 }
                 elseif (!$SubCommandSubCommandArgs) {
-                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
+                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
                 }
                 else {
-                    $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
-                    $ExecutionContext.InvokeCommand($SpackEnv)
+                    $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
+                    Set-SpackEnv $SpackEnv
                 }
             }
             "deactivate" {
                 if ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
-                    python $Env:SPACK_ROOT/bin/spack env deactivate $SubCommandSubCommandArgs
+                    python "$Env:SPACK_ROOT/bin/spack" env deactivate $SubCommandSubCommandArgs
                 }
                 elseif($SubCommandSubCommandArgs) {
-                    python $Env:SPACK_ROOT/bin/spack env deactivate -h
+                    python "$Env:SPACK_ROOT/bin/spack" env deactivate -h
                 }
                 else {
-                    $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env deactivate --pwsh)
-                    $ExecutionContext.InvokeCommand($SpackEnv)
+                    $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params env deactivate "--pwsh")
+                    Set-SpackEnv $SpackEnv
                 }
             }
-            default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
+            default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
         }
     }
 }

 function Invoke-SpackLoad {
     if (Compare-CommonArgs $SpackSubCommandArgs) {
-        python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
+        python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
     }
     elseif ([bool]($SpackSubCommandArgs.Where({($_ -eq "--pwsh") -or ($_ -eq "--list")}))) {
-        python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
+        python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
     }
     else {
-        $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
-        $ExecutionContext.InvokeCommand($SpackEnv)
+        $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
+        Set-SpackEnv $SpackEnv
     }
 }
@@ -116,7 +130,7 @@ function Invoke-SpackLoad
 $SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs = Read-SpackArgs $args

 if (Compare-CommonArgs $SpackCMD_params) {
-    python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
+    python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
     exit $LASTEXITCODE
 }
@@ -128,5 +142,5 @@ switch($SpackSubCommand)
     "env" {Invoke-SpackEnv}
     "load" {Invoke-SpackLoad}
     "unload" {Invoke-SpackLoad}
-    default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
+    default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
 }
```
**[file name not captured]** (likely `etc/spack/defaults/packages.yaml`; the change reorders the preferred `zlib-api` providers)

```diff
@@ -60,7 +60,7 @@ packages:
     xxd: [xxd-standalone, vim]
     yacc: [bison, byacc]
     ziglang: [zig]
-    zlib-api: [zlib, zlib-ng+compat]
+    zlib-api: [zlib-ng+compat, zlib]
   permissions:
     read: world
     write: user
```
**[file name not captured]** (likely the `BundlePackage` page in Spack's docs, e.g. `lib/spack/docs/build_systems/bundlepackage.rst`)

```diff
@@ -9,9 +9,32 @@
 Bundle
 ------

-``BundlePackage`` represents a set of packages that are expected to work well
-together, such as a collection of commonly used software libraries. The
-associated software is specified as bundle dependencies.
+``BundlePackage`` represents a set of packages that are expected to work
+well together, such as a collection of commonly used software libraries.
+The associated software is specified as dependencies.
+
+If it makes sense, variants, conflicts, and requirements can be added to
+the package. :ref:`Variants <variants>` ensure that common build options
+are consistent across the packages supporting them. :ref:`Conflicts
+and requirements <packaging_conflicts>` prevent attempts to build with known
+bugs or limitations.
+
+For example, if ``MyBundlePackage`` is known to only build on ``linux``,
+it could use the ``require`` directive as follows:
+
+.. code-block:: python
+
+   require("platform=linux", msg="MyBundlePackage only builds on linux")
+
+Spack has a number of built-in bundle packages, such as:
+
+* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_
+* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_
+* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/libc/package.py>`_
+* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/xsdk/package.py>`_
+
+where ``Xsdk`` also inherits from ``CudaPackage`` and ``RocmPackage`` and
+``Libc`` is a virtual bundle package for the C standard library.


 ^^^^^^^^
```
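To make the documented pattern concrete, here is a minimal sketch of what such a bundle `package.py` could look like. The `MySdk` name, version label, and dependency specs are hypothetical (not taken from this diff); the `require` line mirrors the usage shown in the docs above.

```python
# Hypothetical bundle package following the pattern documented above.
# The class name, version, and dependency list are illustrative only.
from spack.package import *


class MySdk(BundlePackage):
    """Hypothetical suite of libraries expected to work well together."""

    homepage = "https://example.com/my-sdk"

    # Bundles have no source code, so a version is just a label
    # (see the packaging-guide note later in this diff).
    version("1.0")

    # The associated software is specified purely as dependencies.
    depends_on("zlib-ng")
    depends_on("bzip2")

    # Known limitation, expressed as in the docs above.
    require("platform=linux", msg="MySdk only builds on linux")
```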
**lib/spack/docs/gpu_configuration.rst** (new file; 113 lines)

```rst
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

==========================
Using External GPU Support
==========================

Many packages come with a ``+cuda`` or ``+rocm`` variant. With no added
configuration Spack will download and install the needed components.
It may be preferable to use existing system support: the following sections
help with using a system installation of GPU libraries.

-----------------------------------
Using an External ROCm Installation
-----------------------------------

Spack breaks down ROCm into many separate component packages. The following
is an example ``packages.yaml`` that organizes a consistent set of ROCm
components for use by dependent packages:

.. code-block:: yaml

   packages:
     all:
       compiler: [rocmcc@=5.3.0]
       variants: amdgpu_target=gfx90a
     hip:
       buildable: false
       externals:
       - spec: hip@5.3.0
         prefix: /opt/rocm-5.3.0/hip
     hsa-rocr-dev:
       buildable: false
       externals:
       - spec: hsa-rocr-dev@5.3.0
         prefix: /opt/rocm-5.3.0/
     llvm-amdgpu:
       buildable: false
       externals:
       - spec: llvm-amdgpu@5.3.0
         prefix: /opt/rocm-5.3.0/llvm/
     comgr:
       buildable: false
       externals:
       - spec: comgr@5.3.0
         prefix: /opt/rocm-5.3.0/
     hipsparse:
       buildable: false
       externals:
       - spec: hipsparse@5.3.0
         prefix: /opt/rocm-5.3.0/
     hipblas:
       buildable: false
       externals:
       - spec: hipblas@5.3.0
         prefix: /opt/rocm-5.3.0/
     rocblas:
       buildable: false
       externals:
       - spec: rocblas@5.3.0
         prefix: /opt/rocm-5.3.0/
     rocprim:
       buildable: false
       externals:
       - spec: rocprim@5.3.0
         prefix: /opt/rocm-5.3.0/rocprim/

This is in combination with the following compiler definition:

.. code-block:: yaml

   compilers:
   - compiler:
       spec: rocmcc@=5.3.0
       paths:
         cc: /opt/rocm-5.3.0/bin/amdclang
         cxx: /opt/rocm-5.3.0/bin/amdclang++
         f77: null
         fc: /opt/rocm-5.3.0/bin/amdflang
       operating_system: rhel8
       target: x86_64

This includes the following considerations:

- Each of the listed externals specifies ``buildable: false`` to force Spack
  to use only the externals we defined.
- ``spack external find`` can automatically locate some of the ``hip``/``rocm``
  packages, but not all of them, and furthermore not in a manner that
  guarantees a complementary set if multiple ROCm installations are available.
- The ``prefix`` is the same for several components, but note that others
  require listing one of the subdirectories as a prefix.

-----------------------------------
Using an External CUDA Installation
-----------------------------------

CUDA is split into fewer components and is simpler to specify:

.. code-block:: yaml

   packages:
     all:
       variants:
       - cuda_arch=70
     cuda:
       buildable: false
       externals:
       - spec: cuda@11.0.2
         prefix: /opt/cuda/cuda-11.0.2/

where ``/opt/cuda/cuda-11.0.2/lib/`` contains ``libcudart.so``.
```
**[file name not captured]** (likely `lib/spack/docs/index.rst`; adds the new page to the toctree)

```diff
@@ -77,6 +77,7 @@ or refer to the full manual below.
    extensions
    pipelines
    signing
+   gpu_configuration

 .. toctree::
    :maxdepth: 2
```
**[file name not captured]** (likely `lib/spack/docs/packaging_guide.rst`)

```diff
@@ -363,6 +363,42 @@ one of these::
 If Spack finds none of these variables set, it will look for ``vim``, ``vi``, ``emacs``,
 ``nano``, and ``notepad``, in that order.

+^^^^^^^^^^^^^^^^^
+Bundling software
+^^^^^^^^^^^^^^^^^
+
+If you have a collection of software expected to work well together with
+no source code of its own, you can create a :ref:`BundlePackage <bundlepackage>`.
+Examples where bundle packages can be useful include defining suites of
+applications (e.g, `EcpProxyApps
+<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_), commonly used libraries
+(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_),
+and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py>`_).
+
+These versioned packages primarily consist of dependencies on the associated
+software packages. They can include :ref:`variants <variants>` to ensure
+common build options are consistently applied to dependencies. Known build
+failures, such as not building on a platform or when certain compilers or
+variants are used, can be flagged with :ref:`conflicts <packaging_conflicts>`.
+Build requirements, such as only building with specific compilers, can similarly
+be flagged with :ref:`requires <packaging_conflicts>`.
+
+The ``spack create --template bundle`` command will create a skeleton
+``BundlePackage`` ``package.py`` for you:
+
+.. code-block:: console
+
+   $ spack create --template bundle --name coolsdk
+
+Now you can fill in the basic package documentation, version(s), and software
+package dependencies along with any other relevant customizations.
+
+.. note::
+
+   Remember that bundle packages have no software of their own so there
+   is nothing to download.
+
+
 ^^^^^^^^^^^^^^^^^^^^^^^^^
 Non-downloadable software
 ^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -610,7 +646,16 @@ add a line like this in the package class:
    version("8.2.0", md5="1c9f62f0778697a09d36121ead88e08e")
    version("8.1.2", md5="d47dd09ed7ae6e7fd6f9a816d7f5fdf6")

-Versions should be listed in descending order, from newest to oldest.
+.. note::
+
+   By convention, we list versions in descending order, from newest to oldest.
+
+.. note::
+
+   :ref:`Bundle packages <bundlepackage>` do not have source code so
+   there is nothing to fetch. Consequently, their version directives
+   consist solely of the version name (e.g., ``version("202309")``).
+

 ^^^^^^^^^^^^^
 Date Versions
@@ -2678,7 +2723,7 @@ Conflicts and requirements
 --------------------------

 Sometimes packages have known bugs, or limitations, that would prevent them
-to build e.g. against other dependencies or with certain compilers. Spack
+from building e.g. against other dependencies or with certain compilers. Spack
 makes it possible to express such constraints with the ``conflicts`` directive.

 Adding the following to a package:
@@ -4773,17 +4818,17 @@ For example, running:

 results in spack checking that the installation created the following **file**:

-* ``self.prefix/bin/reframe``
+* ``self.prefix.bin.reframe``

 and the following **directories**:

-* ``self.prefix/bin``
-* ``self.prefix/config``
-* ``self.prefix/docs``
-* ``self.prefix/reframe``
-* ``self.prefix/tutorials``
-* ``self.prefix/unittests``
-* ``self.prefix/cscs-checks``
+* ``self.prefix.bin``
+* ``self.prefix.config``
+* ``self.prefix.docs``
+* ``self.prefix.reframe``
+* ``self.prefix.tutorials``
+* ``self.prefix.unittests``
+* ``self.prefix.cscs-checks``

 If **any** of these paths are missing, then Spack considers the installation
 to have failed.
@@ -4927,7 +4972,7 @@ installed executable. The check is implemented as follows:
    @on_package_attributes(run_tests=True)
    def check_list(self):
        with working_dir(self.stage.source_path):
-           reframe = Executable(join_path(self.prefix, "bin", "reframe"))
+           reframe = Executable(self.prefix.bin.reframe)
            reframe("-l")

 .. warning::
@@ -5147,8 +5192,8 @@ embedded test parts.
    for example in ["ex1", "ex2"]:
        with test_part(
            self,
-           "test_example_{0}".format(example),
-           purpose="run installed {0}".format(example),
+           f"test_example_{example}",
+           purpose=f"run installed {example}",
        ):
            exe = which(join_path(self.prefix.bin, example))
            exe()
@@ -5226,11 +5271,10 @@ Below illustrates using this feature to compile an example.
    ...
    cxx = which(os.environ["CXX"])
    cxx(
-       "-L{0}".format(self.prefix.lib),
-       "-I{0}".format(self.prefix.include),
-       "{0}.cpp".format(exe),
-       "-o",
-       exe
+       f"-L{self.prefix.lib}",
+       f"-I{self.prefix.include}",
+       f"{exe}.cpp",
+       "-o", exe
    )
    cxx_example = which(exe)
    cxx_example()
@@ -5247,14 +5291,14 @@ Saving build-time files
 We highly recommend re-using build-time test sources and pared down
 input files for testing installed software. These files are easier
 to keep synchronized with software capabilities since they reside
-within the software's repository.
-
+within the software's repository.
+
 If that is not possible, you can add test-related files to the package
 repository (see :ref:`adding custom files <cache_custom_files>`). It
 will be important to maintain them so they work across listed or supported
 versions of the package.

-You can use the ``cache_extra_test_sources`` method to copy directories
+You can use the ``cache_extra_test_sources`` helper to copy directories
 and or files from the source build stage directory to the package's
 installation directory.
@@ -5262,10 +5306,15 @@ The signature for ``cache_extra_test_sources`` is:

 .. code-block:: python

-   def cache_extra_test_sources(self, srcs):
+   def cache_extra_test_sources(pkg, srcs):

+where each argument has the following meaning:
+
+* ``pkg`` is an instance of the package for the spec under test.
+
+* ``srcs`` is a string *or* a list of strings corresponding to the
+  paths of subdirectories and or files needed for stand-alone testing.
-where ``srcs`` is a string *or* a list of strings corresponding to the
-paths of subdirectories and or files needed for stand-alone testing.
+
 The paths must be relative to the staged source directory. Contents of
 subdirectories and files are copied to a special test cache subdirectory
 of the installation prefix. They are automatically copied to the appropriate
@@ -5286,21 +5335,18 @@ and using ``foo.c`` in a test method is illustrated below.
        srcs = ["tests",
                join_path("examples", "foo.c"),
                join_path("examples", "bar.c")]
-       self.cache_extra_test_sources(srcs)
+       cache_extra_test_sources(self, srcs)

    def test_foo(self):
        exe = "foo"
-       src_dir = join_path(
-           self.test_suite.current_test_cache_dir, "examples"
-       )
+       src_dir = self.test_suite.current_test_cache_dir.examples
        with working_dir(src_dir):
            cc = which(os.environ["CC"])
            cc(
-               "-L{0}".format(self.prefix.lib),
-               "-I{0}".format(self.prefix.include),
-               "{0}.c".format(exe),
-               "-o",
-               exe
+               f"-L{self.prefix.lib}",
+               f"-I{self.prefix.include}",
+               f"{exe}.c",
+               "-o", exe
            )
            foo = which(exe)
            foo()
@@ -5326,9 +5372,9 @@ the files using the ``self.test_suite.current_test_cache_dir`` property.
 In our example above, test methods can use the following paths to reference
 the copy of each entry listed in ``srcs``, respectively:

-* ``join_path(self.test_suite.current_test_cache_dir, "tests")``
-* ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
-* ``join_path(self.test_suite.current_test_cache_dir, "examples", "bar.c")``
+* ``self.test_suite.current_test_cache_dir.tests``
+* ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
+* ``join_path(self.test_suite.current_test_cache_dir.examples, "bar.c")``

 .. admonition:: Library packages should build stand-alone tests
@@ -5347,7 +5393,7 @@ the copy of each entry listed in ``srcs``, respectively:
    If one or more of the copied files needs to be modified to reference
    the installed software, it is recommended that those changes be made
    to the cached files **once** in the ``copy_test_sources`` method and
-   ***after** the call to ``self.cache_extra_test_sources()``. This will
+   ***after** the call to ``cache_extra_test_sources()``. This will
    reduce the amount of unnecessary work in the test method **and** avoid
    problems testing in shared instances and facility deployments.
@@ -5394,7 +5440,7 @@ property as shown below.
        """build and run custom-example"""
        data_dir = self.test_suite.current_test_data_dir
        exe = "custom-example"
-       src = datadir.join("{0}.cpp".format(exe))
+       src = datadir.join(f"{exe}.cpp")
        ...
        # TODO: Build custom-example using src and exe
        ...
@@ -5410,7 +5456,7 @@ Reading expected output from a file

 The helper function ``get_escaped_text_output`` is available for packages
 to retrieve and properly format the text from a file that contains the
-expected output from running an executable that may contain special
+expected output from running an executable that may contain special
 characters.

 The signature for ``get_escaped_text_output`` is:
@@ -5444,7 +5490,7 @@ added to the package's ``test`` subdirectory.
            db_filename, ".dump", output=str.split, error=str.split
        )
        for exp in expected:
-           assert re.search(exp, out), "Expected '{0}' in output".format(exp)
+           assert re.search(exp, out), f"Expected '{exp}' in output"

 If the file was instead copied from the ``tests`` subdirectory of the staged
 source code, the path would be obtained as shown below.
@@ -5457,7 +5503,7 @@ source code, the path would be obtained as shown below.
    db_filename = test_cache_dir.join("packages.db")

 Alternatively, if the file was copied to the ``share/tests`` subdirectory
-as part of the installation process, the test could access the path as
+as part of the installation process, the test could access the path as
 follows:

 .. code-block:: python
@@ -5494,9 +5540,12 @@ Invoking the method is the equivalent of:

 .. code-block:: python

+   errors = []
    for check in expected:
        if not re.search(check, actual):
-           raise RuntimeError("Expected '{0}' in output '{1}'".format(check, actual))
+           errors.append(f"Expected '{check}' in output '{actual}'")
+   if errors:
+       raise RuntimeError("\n ".join(errors))

 .. _accessing-files:
@@ -5536,7 +5585,7 @@ repository, and installation.
      - ``self.test_suite.test_dir_for_spec(self.spec)``
    * - Current Spec's Build-time Files
      - ``self.test_suite.current_test_cache_dir``
-     - ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
+     - ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
    * - Current Spec's Custom Test Files
      - ``self.test_suite.current_test_data_dir``
      - ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``
@@ -5551,7 +5600,7 @@ Inheriting stand-alone tests

 Stand-alone tests defined in parent (.e.g., :ref:`build-systems`) and
 virtual (e.g., :ref:`virtual-dependencies`) packages are executed by
 packages that inherit from or provide interface implementations for those
-packages, respectively.
+packages, respectively.

 The table below summarizes the stand-alone tests that will be executed along
 with those implemented in the package itself.
@@ -5621,7 +5670,7 @@ for ``openmpi``:
    SKIPPED: test_version_oshcc: oshcc is not installed
    ...
    ==> [2023-03-10-16:04:02.215227] Completed testing
-   ==> [2023-03-10-16:04:02.215597]
+   ==> [2023-03-10-16:04:02.215597]
    ======================== SUMMARY: openmpi-4.1.4-ubmrigj ========================
    Openmpi::test_bin_mpirun .. PASSED
    Openmpi::test_bin_ompi_info .. PASSED
@@ -6071,7 +6120,7 @@ in the extra attributes can implement this method like this:
    @classmethod
    def validate_detected_spec(cls, spec, extra_attributes):
        """Check that "compilers" is in the extra attributes."""
-       msg = ("the extra attribute "compilers" must be set for "
+       msg = ("the extra attribute 'compilers' must be set for "
              "the detected spec '{0}'".format(spec))
        assert "compilers" in extra_attributes, msg
```
**[file name not captured]** (likely `lib/spack/docs/requirements.txt`; routine pins bump)

```diff
@@ -1,13 +1,13 @@
-sphinx==6.2.1
+sphinx==7.2.5
 sphinxcontrib-programoutput==0.17
 sphinx_design==0.5.0
-sphinx-rtd-theme==1.2.2
+sphinx-rtd-theme==1.3.0
 python-levenshtein==0.21.1
 docutils==0.18.1
 pygments==2.16.1
 urllib3==2.0.4
-pytest==7.4.0
+pytest==7.4.2
 isort==5.12.0
 black==23.7.0
 flake8==6.1.0
-mypy==1.5.0
+mypy==1.5.1
```
**[file name not captured]** (likely `lib/spack/llnl/util/filesystem.py`; switches `os.path.islink` calls to the symlink module's `islink` and threads a new `allow_broken_symlinks` option through the copy helpers)

```diff
@@ -18,11 +18,13 @@
 import sys
 import tempfile
 from contextlib import contextmanager
+from itertools import accumulate
 from typing import Callable, Iterable, List, Match, Optional, Tuple, Union

+import llnl.util.symlink
 from llnl.util import tty
 from llnl.util.lang import dedupe, memoized
-from llnl.util.symlink import islink, symlink
+from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink

 from spack.util.executable import Executable, which
 from spack.util.path import path_to_os_path, system_path_filter
@@ -101,7 +103,7 @@ def _nop(args, ns=None, follow_symlinks=None):
         pass

     # follow symlinks (aka don't not follow symlinks)
-    follow = follow_symlinks or not (os.path.islink(src) and os.path.islink(dst))
+    follow = follow_symlinks or not (islink(src) and islink(dst))
     if follow:
         # use the real function if it exists
         def lookup(name):
@@ -169,7 +171,7 @@ def rename(src, dst):
     if sys.platform == "win32":
         # Windows path existence checks will sometimes fail on junctions/links/symlinks
         # so check for that case
-        if os.path.exists(dst) or os.path.islink(dst):
+        if os.path.exists(dst) or islink(dst):
             os.remove(dst)
     os.rename(src, dst)
@@ -566,7 +568,7 @@ def set_install_permissions(path):
     # If this points to a file maintained in a Spack prefix, it is assumed that
     # this function will be invoked on the target. If the file is outside a
     # Spack-maintained prefix, the permissions should not be modified.
-    if os.path.islink(path):
+    if islink(path):
         return
     if os.path.isdir(path):
         os.chmod(path, 0o755)
@@ -635,7 +637,7 @@ def chmod_x(entry, perms):
 @system_path_filter
 def copy_mode(src, dest):
     """Set the mode of dest to that of src unless it is a link."""
-    if os.path.islink(dest):
+    if islink(dest):
         return
     src_mode = os.stat(src).st_mode
     dest_mode = os.stat(dest).st_mode
@@ -721,26 +723,12 @@ def install(src, dest):
     copy(src, dest, _permissions=True)


-@system_path_filter
-def resolve_link_target_relative_to_the_link(link):
-    """
-    os.path.isdir uses os.path.exists, which for links will check
-    the existence of the link target. If the link target is relative to
-    the link, we need to construct a pathname that is valid from
-    our cwd (which may not be the same as the link's directory)
-    """
-    target = os.readlink(link)
-    if os.path.isabs(target):
-        return target
-    link_dir = os.path.dirname(os.path.abspath(link))
-    return os.path.join(link_dir, target)
-
-
```
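For context on why the helper removed above exists (it moves into `llnl.util.symlink`, per the new import at the top of this file): `os.path.isdir` follows links, and a relative link target is only meaningful relative to the link's own directory, not the caller's working directory. A minimal sketch of the resolution it performs, with made-up paths:

```python
# Why relative link targets must be resolved against the link's directory,
# not the current working directory. Paths here are illustrative only.
import os

# Suppose /data/pkgs/current -> "v2" (a relative target).
link = "/data/pkgs/current"
target = "v2"  # what os.readlink(link) would return for this link

# A naive check from an unrelated cwd would test "./v2", the wrong path.
# Anchoring the target at the link's own directory fixes that:
if not os.path.isabs(target):
    target = os.path.join(os.path.dirname(os.path.abspath(link)), target)

print(target)  # /data/pkgs/v2 -- valid no matter what the caller's cwd is
```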
```diff
 @system_path_filter
 def copy_tree(
     src: str,
     dest: str,
     symlinks: bool = True,
+    allow_broken_symlinks: bool = sys.platform != "win32",
     ignore: Optional[Callable[[str], bool]] = None,
     _permissions: bool = False,
 ):
@@ -763,6 +751,8 @@ def copy_tree(
         src (str): the directory to copy
         dest (str): the destination directory
         symlinks (bool): whether or not to preserve symlinks
+        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
+            On Windows, setting this to True will raise an exception. Defaults to true on unix.
         ignore (typing.Callable): function indicating which files to ignore
         _permissions (bool): for internal use only
@@ -770,6 +760,8 @@ def copy_tree(
         IOError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
+    if allow_broken_symlinks and sys.platform == "win32":
+        raise llnl.util.symlink.SymlinkError("Cannot allow broken symlinks on Windows!")
     if _permissions:
         tty.debug("Installing {0} to {1}".format(src, dest))
     else:
@@ -783,6 +775,11 @@ def copy_tree(
     if not files:
         raise IOError("No such file or directory: '{0}'".format(src))

+    # For Windows hard-links and junctions, the source path must exist to make a symlink. Add
+    # all symlinks to this list while traversing the tree, then when finished, make all
+    # symlinks at the end.
+    links = []
+
     for src in files:
         abs_src = os.path.abspath(src)
         if not abs_src.endswith(os.path.sep):
@@ -805,7 +802,7 @@ def copy_tree(
             ignore=ignore,
             follow_nonexisting=True,
         ):
-            if os.path.islink(s):
+            if islink(s):
                 link_target = resolve_link_target_relative_to_the_link(s)
                 if symlinks:
                     target = os.readlink(s)
@@ -819,7 +816,9 @@ def escaped_path(path):
                         tty.debug("Redirecting link {0} to {1}".format(target, new_target))
                         target = new_target

-                    symlink(target, d)
+                    links.append((target, d, s))
+                    continue
+
                 elif os.path.isdir(link_target):
                     mkdirp(d)
                 else:
@@ -834,9 +833,17 @@ def escaped_path(path):
                 set_install_permissions(d)
                 copy_mode(s, d)

+    for target, d, s in links:
+        symlink(target, d, allow_broken_symlinks=allow_broken_symlinks)
+        if _permissions:
+            set_install_permissions(d)
+            copy_mode(s, d)
+

 @system_path_filter
-def install_tree(src, dest, symlinks=True, ignore=None):
+def install_tree(
+    src, dest, symlinks=True, ignore=None, allow_broken_symlinks=sys.platform != "win32"
+):
     """Recursively install an entire directory tree rooted at *src*.

     Same as :py:func:`copy_tree` with the addition of setting proper
@@ -847,12 +854,21 @@ def install_tree(src, dest, symlinks=True, ignore=None):
         dest (str): the destination directory
         symlinks (bool): whether or not to preserve symlinks
         ignore (typing.Callable): function indicating which files to ignore
+        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
+            On Windows, setting this to True will raise an exception.

     Raises:
         IOError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
-    copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
+    copy_tree(
+        src,
+        dest,
+        symlinks=symlinks,
+        allow_broken_symlinks=allow_broken_symlinks,
+        ignore=ignore,
+        _permissions=True,
+    )


 @system_path_filter
```
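With the new keyword threaded through, callers can now opt in or out of dangling links. A hedged usage sketch of the widened `install_tree` signature above; the paths are hypothetical, and the exact failure mode for disallowed broken links lives in the `symlink` module (its rewrite appears at the end of this diff):

```python
# Hypothetical usage of the updated install_tree signature shown above.
# Per the new docstrings: on unix, dangling symlinks are tolerated by
# default; on Windows, passing allow_broken_symlinks=True raises.
from llnl.util.filesystem import install_tree

install_tree(
    "stage/my-pkg",               # hypothetical staged source tree
    "/opt/spack/my-pkg",          # hypothetical install prefix
    symlinks=True,                # preserve links instead of copying targets
    allow_broken_symlinks=False,  # ask the symlink() helper to reject dangling links
)
```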
```diff
@@ -1256,7 +1272,12 @@ def traverse_tree(
     Keyword Arguments:
         order (str): Whether to do pre- or post-order traversal. Accepted
             values are 'pre' and 'post'
-        ignore (typing.Callable): function indicating which files to ignore
+        ignore (typing.Callable): function indicating which files to ignore. This will also
+            ignore symlinks if they point to an ignored file (regardless of whether the symlink
+            is explicitly ignored); note this only supports one layer of indirection (i.e. if
+            you have x -> y -> z, and z is ignored but x/y are not, then y would be ignored
+            but not x). To avoid this, make sure the ignore function also ignores the symlink
+            paths too.
         follow_nonexisting (bool): Whether to descend into directories in
             ``src`` that do not exit in ``dest``. Default is True
         follow_links (bool): Whether to descend into symlinks in ``src``
```
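A rough illustration of the one-layer-of-indirection caveat in that docstring. The layout and `ignore` predicate are made up, and this simplification checks only the immediate link target, whereas the real code (next hunk) inspects every component of the target path:

```python
# Illustrative only: with x -> y -> z, ignoring "z" skips y (whose readlink
# target is z) but not x (whose readlink target is y, which is not ignored).
import os
import tempfile

tmp = tempfile.mkdtemp()
open(os.path.join(tmp, "z"), "w").close()
os.symlink("z", os.path.join(tmp, "y"))  # y -> z
os.symlink("y", os.path.join(tmp, "x"))  # x -> y -> z

ignore = lambda name: name == "z"

print(ignore(os.readlink(os.path.join(tmp, "y"))))  # True:  y gets skipped
print(ignore(os.readlink(os.path.join(tmp, "x"))))  # False: x does not
```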
```diff
@@ -1283,11 +1304,24 @@ def traverse_tree(
         dest_child = os.path.join(dest_path, f)
         rel_child = os.path.join(rel_path, f)

+        # If the source path is a link and the link's source is ignored, then ignore the link too,
+        # but only do this if the ignore is defined.
+        if ignore is not None:
+            if islink(source_child) and not follow_links:
+                target = readlink(source_child)
+                all_parents = accumulate(target.split(os.sep), lambda x, y: os.path.join(x, y))
+                if any(map(ignore, all_parents)):
+                    tty.warn(
+                        f"Skipping {source_path} because the source or a part of the source's "
+                        f"path is included in the ignores."
+                    )
+                    continue
+
         # Treat as a directory
         # TODO: for symlinks, os.path.isdir looks for the link target. If the
         # target is relative to the link, then that may not resolve properly
         # relative to our cwd - see resolve_link_target_relative_to_the_link
-        if os.path.isdir(source_child) and (follow_links or not os.path.islink(source_child)):
+        if os.path.isdir(source_child) and (follow_links or not islink(source_child)):
             # When follow_nonexisting isn't set, don't descend into dirs
             # in source that do not exist in dest
             if follow_nonexisting or os.path.exists(dest_child):
@@ -1313,7 +1347,11 @@ def traverse_tree(

 def lexists_islink_isdir(path):
     """Computes the tuple (lexists(path), islink(path), isdir(path)) in a minimal
-    number of stat calls."""
+    number of stat calls on unix. Use os.path and symlink.islink methods for windows."""
+    if sys.platform == "win32":
+        if not os.path.lexists(path):
+            return False, False, False
+        return os.path.lexists(path), islink(path), os.path.isdir(path)
     # First try to lstat, so we know if it's a link or not.
     try:
         lst = os.lstat(path)
```
@@ -1528,7 +1566,7 @@ def remove_if_dead_link(path):
     Parameters:
         path (str): The potential dead link
     """
-    if os.path.islink(path) and not os.path.exists(path):
+    if islink(path) and not os.path.exists(path):
         os.unlink(path)


@@ -1587,7 +1625,7 @@ def remove_linked_tree(path):
        kwargs["onerror"] = readonly_file_handler(ignore_errors=True)

    if os.path.exists(path):
-        if os.path.islink(path):
+        if islink(path):
            shutil.rmtree(os.path.realpath(path), **kwargs)
            os.unlink(path)
        else:
@@ -1754,9 +1792,14 @@ def find(root, files, recursive=True):
         files = [files]

     if recursive:
-        return _find_recursive(root, files)
+        tty.debug(f"Find (recursive): {root} {str(files)}")
+        result = _find_recursive(root, files)
     else:
-        return _find_non_recursive(root, files)
+        tty.debug(f"Find (not recursive): {root} {str(files)}")
+        result = _find_non_recursive(root, files)
+
+    tty.debug(f"Find complete: {root} {str(files)}")
+    return result


 @system_path_filter
@@ -2688,7 +2731,7 @@ def remove_directory_contents(dir):
     """Remove all contents of a directory."""
     if os.path.exists(dir):
         for entry in [os.path.join(dir, entry) for entry in os.listdir(dir)]:
-            if os.path.isfile(entry) or os.path.islink(entry):
+            if os.path.isfile(entry) or islink(entry):
                 os.unlink(entry)
             else:
                 shutil.rmtree(entry)
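
All of the filesystem.py hunks above swap os.path.islink for the symlink module's islink. A hedged illustration of why, assuming the module is importable as llnl.util.symlink (the unqualified islink/readlink calls suggest it is): os.path.islink only recognizes true symlinks, while Spack's islink also counts junctions and hard links (implementation further below; the path is hypothetical).

import os
from llnl.util.symlink import islink

print(os.path.islink("C:\\opt\\junction"))  # False for a junction
print(islink("C:\\opt\\junction"))          # True: junctions count as links
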
@@ -2,77 +2,188 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import errno
 import os
+import re
 import shutil
+import subprocess
 import sys
 import tempfile
-from os.path import exists, join

-from llnl.util import lang
+from llnl.util import lang, tty

+from spack.error import SpackError
+from spack.util.path import system_path_filter

 if sys.platform == "win32":
     from win32file import CreateHardLink

+is_windows = sys.platform == "win32"

-def symlink(real_path, link_path):
-    """
-    Create a symbolic link.
-
-    On Windows, use junctions if os.symlink fails.
+def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not is_windows):
     """
-    if sys.platform != "win32":
-        os.symlink(real_path, link_path)
-    elif _win32_can_symlink():
-        # Windows requires target_is_directory=True when the target is a dir.
-        os.symlink(real_path, link_path, target_is_directory=os.path.isdir(real_path))
-    else:
-        try:
-            # Try to use junctions
-            _win32_junction(real_path, link_path)
-        except OSError as e:
-            if e.errno == errno.EEXIST:
-                # EEXIST error indicates that file we're trying to "link"
-                # is already present, don't bother trying to copy which will also fail
-                # just raise
-                raise
+    Create a link.
+
+    On non-Windows and Windows with System Administrator
+    privileges this will be a normal symbolic link via
+    os.symlink.
+
+    On Windows without privileges the link will be a
+    junction for a directory and a hardlink for a file.
+    On Windows the various link types are:
+
+    Symbolic Link: A link to a file or directory on the
+    same or different volume (drive letter) or even to
+    a remote file or directory (using UNC in its path).
+    Need System Administrator privileges to make these.
+
+    Hard Link: A link to a file on the same volume (drive
+    letter) only. Every file (file's data) has at least 1
+    hard link (file's name). But when this method creates
+    a new hard link there will be 2. Deleting all hard
+    links effectively deletes the file. Don't need System
+    Administrator privileges.
+
+    Junction: A link to a directory on the same or different
+    volume (drive letter) but not to a remote directory. Don't
+    need System Administrator privileges.
+
+    Parameters:
+        source_path (str): The real file or directory that the link points to.
+            Must be absolute OR relative to the link.
+        link_path (str): The path where the link will exist.
+        allow_broken_symlinks (bool): On Linux or Mac, don't raise an exception if the source_path
+            doesn't exist. This will still raise an exception on Windows.
+    """
+    source_path = os.path.normpath(source_path)
+    win_source_path = source_path
+    link_path = os.path.normpath(link_path)
+
+    # Never allow broken links on Windows.
+    if sys.platform == "win32" and allow_broken_symlinks:
+        raise ValueError("allow_broken_symlinks parameter cannot be True on Windows.")
+
+    if not allow_broken_symlinks:
+        # Perform basic checks to make sure symlinking will succeed
+        if os.path.lexists(link_path):
+            raise SymlinkError(f"Link path ({link_path}) already exists. Cannot create link.")
+
+        if not os.path.exists(source_path):
+            if os.path.isabs(source_path) and not allow_broken_symlinks:
+                # An absolute source path that does not exist will result in a broken link.
+                raise SymlinkError(
+                    f"Source path ({source_path}) is absolute but does not exist. Resulting "
+                    f"link would be broken so not making link."
+                )
             else:
-                # If all else fails, fall back to copying files
-                shutil.copyfile(real_path, link_path)
+                # os.symlink can create a link when the given source path is relative to
+                # the link path. Emulate this behavior and check to see if the source exists
+                # relative to the link path ahead of link creation to prevent broken
+                # links from being made.
+                link_parent_dir = os.path.dirname(link_path)
+                relative_path = os.path.join(link_parent_dir, source_path)
+                if os.path.exists(relative_path):
+                    # In order to work on windows, the source path needs to be modified to be
+                    # relative because hardlink/junction don't resolve relative paths the same
+                    # way as os.symlink. This is ignored on other operating systems.
+                    win_source_path = relative_path
+                elif not allow_broken_symlinks:
+                    raise SymlinkError(
+                        f"The source path ({source_path}) is not relative to the link path "
+                        f"({link_path}). Resulting link would be broken so not making link."
+                    )
+
+    # Create the symlink
+    if sys.platform == "win32" and not _windows_can_symlink():
+        _windows_create_link(win_source_path, link_path)
+    else:
+        os.symlink(source_path, link_path, target_is_directory=os.path.isdir(source_path))
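
A short usage sketch of the rewritten symlink(); the paths are hypothetical and the error type is the SymlinkError defined at the end of this file:

from llnl.util.symlink import symlink, SymlinkError

try:
    # Relative source: checked against the link's parent directory first.
    symlink("pkg-1.2.3", "/opt/spack/opt/pkg-latest")
except SymlinkError as e:
    print(f"refused to create a broken link: {e}")
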
-def islink(path):
-    return os.path.islink(path) or _win32_is_junction(path)
+def islink(path: str) -> bool:
+    """Override os.islink to give correct answer for spack logic.
+
+    For Non-Windows: a link can be determined with the os.path.islink method.
+    Windows-only methods will return false for other operating systems.
+
+    For Windows: spack considers symlinks, hard links, and junctions to
+    all be links, so if any of those are True, return True.
+
+    Args:
+        path (str): path to check if it is a link.
+
+    Returns:
+        bool - whether the path is any kind of link or not.
+    """
+    return any([os.path.islink(path), _windows_is_junction(path), _windows_is_hardlink(path)])


-# '_win32' functions based on
-# https://github.com/Erotemic/ubelt/blob/master/ubelt/util_links.py
-def _win32_junction(path, link):
-    # junctions require absolute paths
-    if not os.path.isabs(link):
-        link = os.path.abspath(link)
+def _windows_is_hardlink(path: str) -> bool:
+    """Determines if a path is a windows hard link. This is accomplished
+    by looking at the number of links using os.stat. A non-hard-linked file
+    will have a st_nlink value of 1, whereas a hard link will have a value
+    larger than 1. Note that both the original and hard-linked file will
+    return True because they share the same inode.

-    # os.symlink will fail if link exists, emulate the behavior here
-    if exists(link):
-        raise OSError(errno.EEXIST, "File exists: %s -> %s" % (link, path))
+    Args:
+        path (str): Windows path to check for a hard link

-    if not os.path.isabs(path):
-        parent = os.path.join(link, os.pardir)
-        path = os.path.join(parent, path)
-        path = os.path.abspath(path)
+    Returns:
+        bool - Whether the path is a hard link or not.
+    """
+    if sys.platform != "win32" or os.path.islink(path) or not os.path.exists(path):
+        return False

-    CreateHardLink(link, path)
+    return os.stat(path).st_nlink > 1
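
The st_nlink heuristic in _windows_is_hardlink can be demonstrated with plain os.link; this runnable sketch uses POSIX paths for portability, but the principle on NTFS is the same:

import os
import tempfile

with tempfile.TemporaryDirectory() as d:
    original = os.path.join(d, "a.txt")
    open(original, "w").close()
    print(os.stat(original).st_nlink)            # 1 -> not hard-linked
    os.link(original, os.path.join(d, "b.txt"))
    print(os.stat(original).st_nlink)            # 2 -> both names now report > 1
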
+def _windows_is_junction(path: str) -> bool:
+    """Determines if a path is a windows junction. A junction can be
+    determined using a bitwise AND operation between the file's
+    attribute bitmask and the known junction bitmask (0x400).
+
+    Args:
+        path (str): A non-file path
+
+    Returns:
+        bool - whether the path is a junction or not.
+    """
+    if sys.platform != "win32" or os.path.islink(path) or os.path.isfile(path):
+        return False
+
+    import ctypes.wintypes
+
+    get_file_attributes = ctypes.windll.kernel32.GetFileAttributesW  # type: ignore[attr-defined]
+    get_file_attributes.argtypes = (ctypes.wintypes.LPWSTR,)
+    get_file_attributes.restype = ctypes.wintypes.DWORD
+
+    invalid_file_attributes = 0xFFFFFFFF
+    reparse_point = 0x400
+    file_attr = get_file_attributes(str(path))
+
+    if file_attr == invalid_file_attributes:
+        return False
+
+    return file_attr & reparse_point > 0
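
A worked example of the bitmask test above; 0x400 is the reparse-point bit, and the directory bit (0x10) is included only to make the mask realistic:

FILE_ATTRIBUTE_DIRECTORY = 0x10
FILE_ATTRIBUTE_REPARSE_POINT = 0x400

file_attr = FILE_ATTRIBUTE_DIRECTORY | FILE_ATTRIBUTE_REPARSE_POINT  # 0x410
print(file_attr & FILE_ATTRIBUTE_REPARSE_POINT > 0)                  # True -> junction
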
 @lang.memoized
-def _win32_can_symlink():
+def _windows_can_symlink() -> bool:
     """
     Determines if windows is able to make a symlink depending on
     the system configuration and the level of the user's permissions.
     """
+    if sys.platform != "win32":
+        tty.warn("windows_can_symlink method can't be used on non-Windows OS.")
+        return False
+
     tempdir = tempfile.mkdtemp()

-    dpath = join(tempdir, "dpath")
-    fpath = join(tempdir, "fpath.txt")
+    dpath = os.path.join(tempdir, "dpath")
+    fpath = os.path.join(tempdir, "fpath.txt")

-    dlink = join(tempdir, "dlink")
-    flink = join(tempdir, "flink.txt")
+    dlink = os.path.join(tempdir, "dlink")
+    flink = os.path.join(tempdir, "flink.txt")

     import llnl.util.filesystem as fs

@@ -96,24 +207,136 @@ def _win32_can_symlink():
     return can_symlink_directories and can_symlink_files
-def _win32_is_junction(path):
+def _windows_create_link(source: str, link: str):
     """
-    Determines if a path is a win32 junction
+    Attempts to create a Hard Link or Junction as an alternative
+    to a symbolic link. This is called when symbolic links cannot
+    be created.
     """
-    if os.path.islink(path):
-        return False
+    if sys.platform != "win32":
+        raise SymlinkError("windows_create_link method can't be used on non-Windows OS.")
+    elif os.path.isdir(source):
+        _windows_create_junction(source=source, link=link)
+    elif os.path.isfile(source):
+        _windows_create_hard_link(path=source, link=link)
+    else:
+        raise SymlinkError(
+            f"Cannot create link from {source}. It is neither a file nor a directory."
+        )

-    if sys.platform == "win32":
-        import ctypes.wintypes
-
-        GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW
-        GetFileAttributes.argtypes = (ctypes.wintypes.LPWSTR,)
-        GetFileAttributes.restype = ctypes.wintypes.DWORD
+
+def _windows_create_junction(source: str, link: str):
+    """Duly verify that the path and link are eligible to create a junction,
+    then create the junction.
+    """
+    if sys.platform != "win32":
+        raise SymlinkError("windows_create_junction method can't be used on non-Windows OS.")
+    elif not os.path.exists(source):
+        raise SymlinkError("Source path does not exist, cannot create a junction.")
+    elif os.path.lexists(link):
+        raise SymlinkError("Link path already exists, cannot create a junction.")
+    elif not os.path.isdir(source):
+        raise SymlinkError("Source path is not a directory, cannot create a junction.")

-        INVALID_FILE_ATTRIBUTES = 0xFFFFFFFF
-        FILE_ATTRIBUTE_REPARSE_POINT = 0x400
+    import subprocess

-        res = GetFileAttributes(path)
-        return res != INVALID_FILE_ATTRIBUTES and bool(res & FILE_ATTRIBUTE_REPARSE_POINT)
+    cmd = ["cmd", "/C", "mklink", "/J", link, source]
+    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    out, err = proc.communicate()
+    tty.debug(out.decode())
+    if proc.returncode != 0:
+        err = err.decode()
+        tty.error(err)
+        raise SymlinkError("Make junction command returned a non-zero return code.", err)

-    return False

+def _windows_create_hard_link(path: str, link: str):
+    """Duly verify that the path and link are eligible to create a hard
+    link, then create the hard link.
+    """
+    if sys.platform != "win32":
+        raise SymlinkError("windows_create_hard_link method can't be used on non-Windows OS.")
+    elif not os.path.exists(path):
+        raise SymlinkError(f"File path {path} does not exist. Cannot create hard link.")
+    elif os.path.lexists(link):
+        raise SymlinkError(f"Link path ({link}) already exists. Cannot create hard link.")
+    elif not os.path.isfile(path):
+        raise SymlinkError(f"File path ({path}) is not a file. Cannot create hard link.")
+    else:
+        tty.debug(f"Creating hard link {link} pointing to {path}")
+        CreateHardLink(link, path)
+def readlink(path: str):
+    """Spack utility overriding the os.readlink method so that it works cross-platform"""
+    if _windows_is_hardlink(path):
+        return _windows_read_hard_link(path)
+    elif _windows_is_junction(path):
+        return _windows_read_junction(path)
+    else:
+        return os.readlink(path)


+def _windows_read_hard_link(link: str) -> str:
+    """Find all of the files that point to the same inode as the link"""
+    if sys.platform != "win32":
+        raise SymlinkError("Can't read hard link on non-Windows OS.")
+    link = os.path.abspath(link)
+    fsutil_cmd = ["fsutil", "hardlink", "list", link]
+    proc = subprocess.Popen(fsutil_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+    out, err = proc.communicate()
+    if proc.returncode != 0:
+        raise SymlinkError(f"An error occurred while reading hard link: {err.decode()}")
+
+    # fsutil response does not include the drive name, so append it back to each linked file.
+    drive, link_tail = os.path.splitdrive(os.path.abspath(link))
+    links = set([os.path.join(drive, p) for p in out.decode().splitlines()])
+    links.remove(link)
+    if len(links) == 1:
+        return links.pop()
+    elif len(links) > 1:
+        # TODO: How best to handle the case where 3 or more paths point to a single inode?
+        raise SymlinkError(f"Found multiple paths pointing to the same inode {links}")
+    else:
+        raise SymlinkError("Cannot determine hard link source path.")
+def _windows_read_junction(link: str):
+    """Find the path that a junction points to."""
+    if sys.platform != "win32":
+        raise SymlinkError("Can't read junction on non-Windows OS.")
+
+    link = os.path.abspath(link)
+    link_basename = os.path.basename(link)
+    link_parent = os.path.dirname(link)
+    fsutil_cmd = ["dir", "/a:l", link_parent]
+    proc = subprocess.Popen(fsutil_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+    out, err = proc.communicate()
+    if proc.returncode != 0:
+        raise SymlinkError(f"An error occurred while reading junction: {err.decode()}")
+    matches = re.search(rf"<JUNCTION>\s+{link_basename} \[(.*)]", out.decode())
+    if matches:
+        return matches.group(1)
+    else:
+        raise SymlinkError("Could not find junction path.")


+@system_path_filter
+def resolve_link_target_relative_to_the_link(link):
+    """
+    os.path.isdir uses os.path.exists, which for links will check
+    the existence of the link target. If the link target is relative to
+    the link, we need to construct a pathname that is valid from
+    our cwd (which may not be the same as the link's directory)
+    """
+    target = readlink(link)
+    if os.path.isabs(target):
+        return target
+    link_dir = os.path.dirname(os.path.abspath(link))
+    return os.path.join(link_dir, target)


+class SymlinkError(SpackError):
+    """Exception class for errors raised while creating symlinks,
+    junctions and hard links
+    """

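
The regex in _windows_read_junction can be checked against a captured line of `dir /a:l` output; the line below is a hypothetical example of that format:

import re

line = r"01/01/2024  10:00 AM    <JUNCTION>     dlink [C:\real\dpath]"
match = re.search(r"<JUNCTION>\s+dlink \[(.*)]", line)
print(match.group(1))  # C:\real\dpath
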
@@ -9,7 +9,6 @@
 import io
-import itertools
 import json
 import multiprocessing.pool
 import os
 import re
 import shutil
@@ -49,6 +48,7 @@
 import spack.util.gpg
 import spack.util.spack_json as sjson
 import spack.util.spack_yaml as syaml
+import spack.util.timer as timer
 import spack.util.url as url_util
 import spack.util.web as web_util
 from spack.caches import misc_cache_location
@@ -876,32 +876,18 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_dir
         db: A spack database used for adding specs and then writing the index.
         temp_dir (str): Location to write index.json and hash for pushing
+        concurrency (int): Number of parallel processes to use when fetching

     Return:
         None
     """
-    for file in file_list:
-        contents = read_method(file)
-        # Need full spec.json name or this gets confused with index.json.
-        if file.endswith(".json.sig"):
-            specfile_json = Spec.extract_json_from_clearsig(contents)
-            fetched_spec = Spec.from_dict(specfile_json)
-        elif file.endswith(".json"):
-            fetched_spec = Spec.from_json(contents)
-        else:
-            continue

+    def _fetch_spec_from_mirror(spec_url):
+        spec_file_contents = read_method(spec_url)
+
+        if spec_file_contents:
+            # Need full spec.json name or this gets confused with index.json.
+            if spec_url.endswith(".json.sig"):
+                specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
+                return Spec.from_dict(specfile_json)
+            if spec_url.endswith(".json"):
+                return Spec.from_json(spec_file_contents)
+
+    tp = multiprocessing.pool.ThreadPool(processes=concurrency)
+    try:
+        fetched_specs = tp.map(
+            llnl.util.lang.star(_fetch_spec_from_mirror), [(f,) for f in file_list]
+        )
+    finally:
+        tp.terminate()
+        tp.join()

     for fetched_spec in fetched_specs:
         db.add(fetched_spec, None)
         db.mark(fetched_spec, "in_buildcache", True)

|
||||
m.linkname = m.linkname[result.end() :]
|
||||
|
||||
|
||||
def extract_tarball(spec, download_result, unsigned=False, force=False):
|
||||
def extract_tarball(spec, download_result, unsigned=False, force=False, timer=timer.NULL_TIMER):
|
||||
"""
|
||||
extract binary tarball for given package into install area
|
||||
"""
|
||||
timer.start("extract")
|
||||
if os.path.exists(spec.prefix):
|
||||
if force:
|
||||
shutil.rmtree(spec.prefix)
|
||||
@@ -1896,7 +1883,9 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
|
||||
|
||||
os.remove(tarfile_path)
|
||||
os.remove(specfile_path)
|
||||
timer.stop("extract")
|
||||
|
||||
timer.start("relocate")
|
||||
try:
|
||||
relocate_package(spec)
|
||||
except Exception as e:
|
||||
@@ -1917,6 +1906,7 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
|
||||
if os.path.exists(filename):
|
||||
os.remove(filename)
|
||||
_delete_staged_downloads(download_result)
|
||||
timer.stop("relocate")
|
||||
|
||||
|
||||
def _ensure_common_prefix(tar: tarfile.TarFile) -> str:
|
||||
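
A hedged sketch of the timer hooks introduced here, using only the spack.util.timer calls that appear in this changeset (Timer(), start/stop by name, write_json):

import io
from spack.util.timer import Timer

t = Timer()
t.start("extract")
# ... unpack the tarball ...
t.stop("extract")

buf = io.StringIO()
t.write_json(buf)   # same serialization the `spack ci rebuild` hunk below uses
print(buf.getvalue())
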
@@ -2383,22 +2373,12 @@ def __init__(self, all_architectures):

         self.possible_specs = specs

-    def __call__(self, spec, **kwargs):
+    def __call__(self, spec: Spec, **kwargs):
         """
         Args:
-            spec (str): The spec being searched for in its string representation or hash.
+            spec: The spec being searched for
         """
-        matches = []
-        if spec.startswith("/"):
-            # Matching a DAG hash
-            query_hash = spec.replace("/", "")
-            for candidate_spec in self.possible_specs:
-                if candidate_spec.dag_hash().startswith(query_hash):
-                    matches.append(candidate_spec)
-        else:
-            # Matching a spec constraint
-            matches = [s for s in self.possible_specs if s.satisfies(spec)]
-        return matches
+        return [s for s in self.possible_specs if s.satisfies(spec)]


 class FetchIndexError(Exception):
@@ -476,16 +476,16 @@ def ensure_executables_in_path_or_raise(
 def _add_externals_if_missing() -> None:
     search_list = [
         # clingo
-        spack.repo.PATH.get_pkg_class("cmake"),
-        spack.repo.PATH.get_pkg_class("bison"),
+        "cmake",
+        "bison",
         # GnuPG
-        spack.repo.PATH.get_pkg_class("gawk"),
+        "gawk",
         # develop deps
-        spack.repo.PATH.get_pkg_class("git"),
+        "git",
     ]
     if IS_WINDOWS:
-        search_list.append(spack.repo.PATH.get_pkg_class("winbison"))
-    externals = spack.detection.by_executable(search_list)
+        search_list.append("winbison")
+    externals = spack.detection.by_path(search_list)
     # System git is typically deprecated, so mark as non-buildable to force it as external
     non_buildable_externals = {k: externals.pop(k) for k in ("git",) if k in externals}
     spack.detection.update_configuration(externals, scope="bootstrap", buildable=True)
@@ -15,9 +15,9 @@

 from llnl.util import tty

 import spack.build_environment
 import spack.environment
 import spack.tengine
+import spack.util.cpus
 import spack.util.executable
 from spack.environment import depfile

@@ -137,7 +137,7 @@ def _install_with_depfile(self) -> None:
             "-C",
             str(self.environment_root()),
             "-j",
-            str(spack.build_environment.determine_number_of_jobs(parallel=True)),
+            str(spack.util.cpus.determine_number_of_jobs(parallel=True)),
             **kwargs,
         )

@@ -68,7 +68,7 @@
 from spack.error import NoHeadersError, NoLibrariesError
 from spack.install_test import spack_install_test_log
 from spack.installer import InstallError
-from spack.util.cpus import cpus_available
+from spack.util.cpus import determine_number_of_jobs
 from spack.util.environment import (
     SYSTEM_DIRS,
     EnvironmentModifications,
@@ -537,39 +537,6 @@ def update_compiler_args_for_dep(dep):
     env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))


-def determine_number_of_jobs(
-    parallel=False, command_line=None, config_default=None, max_cpus=None
-):
-    """
-    Packages that require sequential builds need 1 job. Otherwise we use the
-    number of jobs set on the command line. If not set, then we use the config
-    defaults (which is usually set through the builtin config scope), but we
-    cap to the number of CPUs available to avoid oversubscription.
-
-    Parameters:
-        parallel (bool or None): true when package supports parallel builds
-        command_line (int or None): command line override
-        config_default (int or None): config default number of jobs
-        max_cpus (int or None): maximum number of CPUs available. When None, this
-            value is automatically determined.
-    """
-    if not parallel:
-        return 1
-
-    if command_line is None and "command_line" in spack.config.scopes():
-        command_line = spack.config.get("config:build_jobs", scope="command_line")
-
-    if command_line is not None:
-        return command_line
-
-    max_cpus = max_cpus or cpus_available()
-
-    # in some rare cases _builtin config may not be set, so default to max 16
-    config_default = config_default or spack.config.get("config:build_jobs", 16)
-
-    return min(max_cpus, config_default)
-
-
 def set_module_variables_for_package(pkg):
     """Populate the Python module of a package with some useful global names.
     This makes things easier for package writers.
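
For orientation, the removed helper now lives in spack.util.cpus, and later hunks call it with keyword arguments; a minimal sketch of the new call site:

from spack.util.cpus import determine_number_of_jobs

# Sequential packages pass parallel=False and get one job; otherwise the
# command line, config default, and available CPUs are consulted as documented above.
jobs = determine_number_of_jobs(parallel=True)
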
@@ -1027,7 +994,7 @@ def get_cmake_prefix_path(pkg):


 def _setup_pkg_and_run(
-    serialized_pkg, function, kwargs, child_pipe, input_multiprocess_fd, jsfd1, jsfd2
+    serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
 ):
     context = kwargs.get("context", "build")

@@ -1048,12 +1015,12 @@ def _setup_pkg_and_run(
             pkg, dirty=kwargs.get("dirty", False), context=context
         )
         return_value = function(pkg, kwargs)
-        child_pipe.send(return_value)
+        write_pipe.send(return_value)

     except StopPhase as e:
         # Do not create a full ChildError from this, it's not an error
         # it's a control statement.
-        child_pipe.send(e)
+        write_pipe.send(e)
     except BaseException:
         # catch ANYTHING that goes wrong in the child process
         exc_type, exc, tb = sys.exc_info()
@@ -1102,10 +1069,10 @@ def _setup_pkg_and_run(
             context,
             package_context,
         )
-        child_pipe.send(ce)
+        write_pipe.send(ce)

     finally:
-        child_pipe.close()
+        write_pipe.close()
         if input_multiprocess_fd is not None:
             input_multiprocess_fd.close()

@@ -1149,7 +1116,7 @@ def child_fun():
     For more information on `multiprocessing` child process creation
     mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
     """
-    parent_pipe, child_pipe = multiprocessing.Pipe()
+    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
     input_multiprocess_fd = None
     jobserver_fd1 = None
     jobserver_fd2 = None
@@ -1174,7 +1141,7 @@ def child_fun():
             serialized_pkg,
             function,
             kwargs,
-            child_pipe,
+            write_pipe,
             input_multiprocess_fd,
             jobserver_fd1,
             jobserver_fd2,
@@ -1183,6 +1150,12 @@ def child_fun():

         p.start()

+        # We close the writable end of the pipe now to be sure that p is the
+        # only process which owns a handle for it. This ensures that when p
+        # closes its handle for the writable end, read_pipe.recv() will
+        # promptly report the readable end as being ready.
+        write_pipe.close()
+
     except InstallError as e:
         e.pkg = pkg
         raise
@@ -1192,7 +1165,16 @@ def child_fun():
     if input_multiprocess_fd is not None:
         input_multiprocess_fd.close()

-    child_result = parent_pipe.recv()
+    def exitcode_msg(p):
+        typ = "exit" if p.exitcode >= 0 else "signal"
+        return f"{typ} {abs(p.exitcode)}"
+
+    try:
+        child_result = read_pipe.recv()
+    except EOFError:
+        p.join()
+        raise InstallError(f"The process has stopped unexpectedly ({exitcode_msg(p)})")
+
     p.join()

     # If returns a StopPhase, raise it
@@ -1212,6 +1194,10 @@ def child_fun():
         child_result.print_context()
         raise child_result

+    # Fallback. Usually caught beforehand in EOFError above.
+    if p.exitcode != 0:
+        raise InstallError(f"The process failed unexpectedly ({exitcode_msg(p)})")
+
     return child_result


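
The pipe rework above is easiest to see in a self-contained sketch: the parent keeps only the read end, so a child that dies without sending anything surfaces as EOFError rather than a hang:

import multiprocessing

def worker(write_pipe):
    write_pipe.send("ok")
    write_pipe.close()

if __name__ == "__main__":
    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
    p = multiprocessing.Process(target=worker, args=(write_pipe,))
    p.start()
    write_pipe.close()              # parent drops the writable end (see comment above)
    try:
        result = read_pipe.recv()   # raises EOFError if the child sent nothing
    except EOFError:
        p.join()
        result = f"child died: exit {p.exitcode}"
    p.join()
    print(result)
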
@@ -274,7 +274,6 @@ def std_args(pkg, generator=None):
         generator,
         define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
         define("CMAKE_BUILD_TYPE", build_type),
-        define("BUILD_TESTING", pkg.run_tests),
     ]

     # CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
@@ -451,7 +450,6 @@ def cmake_args(self):

         * CMAKE_INSTALL_PREFIX
         * CMAKE_BUILD_TYPE
-        * BUILD_TESTING

         which will be set automatically.
         """
@@ -154,7 +154,7 @@ def cuda_flags(arch_list):
     conflicts("%pgi@:15.3,15.5:", when="+cuda ^cuda@7.5 target=x86_64:")
     conflicts("%pgi@:16.2,16.0:16.3", when="+cuda ^cuda@8 target=x86_64:")
     conflicts("%pgi@:15,18:", when="+cuda ^cuda@9.0:9.1 target=x86_64:")
-    conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10 target=x86_64:")
+    conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10.0 target=x86_64:")
     conflicts("%pgi@:17,20:", when="+cuda ^cuda@10.1.105:10.2.89 target=x86_64:")
     conflicts("%pgi@:17,21:", when="+cuda ^cuda@11.0.2:11.1.0 target=x86_64:")
     conflicts("%clang@:3.4", when="+cuda ^cuda@:7.5 target=x86_64:")
@@ -95,7 +95,7 @@ def makefile_root(self):
         return self.stage.source_path

     @property
-    def nmakefile_name(self):
+    def makefile_name(self):
         """Name of the current makefile. This is currently an empty value.
         If a project defines this value, it will be used with the /f argument
         to provide nmake an explicit makefile. This is useful in scenarios where
@@ -126,8 +126,8 @@ def build(self, pkg, spec, prefix):
         """Run "nmake" on the build targets specified by the builder."""
         opts = self.std_nmake_args
         opts += self.nmake_args()
-        if self.nmakefile_name:
-            opts.append("/f {}".format(self.nmakefile_name))
+        if self.makefile_name:
+            opts.append("/F{}".format(self.makefile_name))
         with fs.working_dir(self.build_directory):
             inspect.getmodule(self.pkg).nmake(
                 *opts, *self.build_targets, ignore_quotes=self.ignore_quotes
@@ -139,8 +139,8 @@ def install(self, pkg, spec, prefix):
         opts = self.std_nmake_args
         opts += self.nmake_args()
         opts += self.nmake_install_args()
-        if self.nmakefile_name:
-            opts.append("/f {}".format(self.nmakefile_name))
+        if self.makefile_name:
+            opts.append("/F{}".format(self.makefile_name))
         opts.append(self.define("PREFIX", prefix))
         with fs.working_dir(self.build_directory):
             inspect.getmodule(self.pkg).nmake(
@@ -300,8 +300,8 @@ def get_external_python_for_prefix(self):
         if python_externals_configured:
             return python_externals_configured[0]

-        python_externals_detection = spack.detection.by_executable(
-            [spack.repo.PATH.get_pkg_class("python")], path_hints=[self.spec.external_path]
+        python_externals_detection = spack.detection.by_path(
+            ["python"], path_hints=[self.spec.external_path]
         )

         python_externals_detected = [
@@ -10,9 +10,10 @@
 import llnl.util.tty as tty

 import spack.builder
-from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
+from spack.build_environment import SPACK_NO_PARALLEL_MAKE
 from spack.directives import build_system, extends, maintainers
 from spack.package_base import PackageBase
+from spack.util.cpus import determine_number_of_jobs
 from spack.util.environment import env_flag
 from spack.util.executable import Executable, ProcessError

@@ -92,7 +93,7 @@ def install(self, pkg, spec, prefix):
             "--copy",
             "-i",
             "-j",
-            str(determine_number_of_jobs(parallel)),
+            str(determine_number_of_jobs(parallel=parallel)),
             "--",
             os.getcwd(),
         ]
@@ -342,9 +342,9 @@ def iter_groups(specs, indent, all_headers):
             print()

             header = "%s{%s} / %s{%s}" % (
-                spack.spec.architecture_color,
+                spack.spec.ARCHITECTURE_COLOR,
                 architecture if architecture else "no arch",
-                spack.spec.compiler_color,
+                spack.spec.COMPILER_COLOR,
                 f"{compiler.display_str}" if compiler else "no compiler",
             )

@@ -20,6 +20,7 @@
 import spack.cmd.common.arguments as arguments
 import spack.config
 import spack.environment as ev
+import spack.error
 import spack.mirror
 import spack.relocate
 import spack.repo
@@ -78,6 +79,11 @@ def setup_parser(subparser: argparse.ArgumentParser):
         "Alternatively, one can decide to build a cache for only the package or only the "
         "dependencies",
     )
+    push.add_argument(
+        "--fail-fast",
+        action="store_true",
+        help="stop pushing on first failure (default is best effort)",
+    )
     arguments.add_common_arguments(push, ["specs"])
     push.set_defaults(func=push_fn)

@@ -296,6 +302,7 @@ def push_fn(args):
     tty.info(f"Selected {len(specs)} specs to push to {url}")

     skipped = []
+    failed = []

     # tty printing
     color = clr.get_color_when()
@@ -326,11 +333,17 @@ def push_fn(args):
         except bindist.NoOverwriteException:
             skipped.append(format_spec(spec))

+        # Catch any other exception unless the fail fast option is set
+        except Exception as e:
+            if args.fail_fast or isinstance(e, (bindist.PickKeyException, bindist.NoKeyException)):
+                raise
+            failed.append((format_spec(spec), e))
+
     if skipped:
         if len(specs) == 1:
             tty.info("The spec is already in the buildcache. Use --force to overwrite it.")
         elif len(skipped) == len(specs):
-            tty.info("All specs are already in the buildcache. Use --force to overwite them.")
+            tty.info("All specs are already in the buildcache. Use --force to overwrite them.")
         else:
             tty.info(
                 "The following {} specs were skipped as they already exist in the buildcache:\n"
@@ -340,6 +353,17 @@ def push_fn(args):
             )
         )

+    if failed:
+        if len(failed) == 1:
+            raise failed[0][1]
+
+        raise spack.error.SpackError(
+            f"The following {len(failed)} errors occurred while pushing specs to the buildcache",
+            "\n".join(
+                elide_list([f"  {spec}: {e.__class__.__name__}: {e}" for spec, e in failed], 5)
+            ),
+        )
+

 def install_fn(args):
     """install from a binary package"""

@@ -19,6 +19,7 @@
 import spack.hash_types as ht
 import spack.mirror
 import spack.util.gpg as gpg_util
+import spack.util.timer as timer
 import spack.util.url as url_util
 import spack.util.web as web_util

@@ -253,6 +254,8 @@ def ci_rebuild(args):
     check a single spec against the remote mirror, and rebuild it from source if the mirror does
     not contain the hash
     """
+    rebuild_timer = timer.Timer()
+
     env = spack.cmd.require_active_env(cmd_name="ci rebuild")

     # Make sure the environment is "gitlab-enabled", or else there's nothing
@@ -736,6 +739,14 @@ def ci_rebuild(args):

     print(reproduce_msg)

+    rebuild_timer.stop()
+    try:
+        with open("install_timers.json", "w") as timelog:
+            extra_attributes = {"name": ".ci-rebuild"}
+            rebuild_timer.write_json(timelog, extra_attributes=extra_attributes)
+    except Exception as e:
+        tty.debug(str(e))
+
     # Tie job success/failure to the success/failure of building the spec
     return install_exit_code

@@ -812,6 +812,9 @@ def bash(args: Namespace, out: IO) -> None:
     parser = spack.main.make_argument_parser()
     spack.main.add_all_commands(parser)

+    aliases = ";".join(f"{key}:{val}" for key, val in spack.main.aliases.items())
+    out.write(f'SPACK_ALIASES="{aliases}"\n\n')
+
     writer = BashCompletionWriter(parser.prog, out, args.aliases)
     writer.write(parser)


@@ -185,7 +185,7 @@ def compiler_list(args):
             os_str = os
             if target:
                 os_str += "-%s" % target
-            cname = "%s{%s} %s" % (spack.spec.compiler_color, name, os_str)
+            cname = "%s{%s} %s" % (spack.spec.COMPILER_COLOR, name, os_str)
             tty.hline(colorize(cname), char="-")
             colify(reversed(sorted(c.spec.display_str for c in compilers)))

@@ -239,6 +239,13 @@ def env_deactivate_setup_parser(subparser):
         const="bat",
         help="print bat commands to activate the environment",
     )
+    shells.add_argument(
+        "--pwsh",
+        action="store_const",
+        dest="shell",
+        const="pwsh",
+        help="print pwsh commands to activate the environment",
+    )


 def env_deactivate(args):

@@ -6,6 +6,7 @@
 import errno
 import os
 import sys
+from typing import List, Optional

 import llnl.util.tty as tty
 import llnl.util.tty.colify as colify
@@ -54,7 +55,7 @@ def setup_parser(subparser):
     find_parser.add_argument(
         "--all", action="store_true", help="search for all packages that Spack knows about"
     )
-    spack.cmd.common.arguments.add_common_arguments(find_parser, ["tags"])
+    spack.cmd.common.arguments.add_common_arguments(find_parser, ["tags", "jobs"])
     find_parser.add_argument("packages", nargs=argparse.REMAINDER)
     find_parser.epilog = (
         'The search is by default on packages tagged with the "build-tools" or '
@@ -120,46 +121,23 @@ def external_find(args):
         else:
             tty.warn("Unable to read manifest, unexpected error: {0}".format(str(e)), skip_msg)

-    # If the user didn't specify anything, search for build tools by default
-    if not args.tags and not args.all and not args.packages:
-        args.tags = ["core-packages", "build-tools"]
+    # Outside the Cray manifest, the search is done by tag for performance reasons,
+    # since tags are cached.

-    # If the user specified both --all and --tag, then --all has precedence
-    if args.all and args.tags:
-        args.tags = []
+    if args.all or args.packages:
+        # Each detectable package has at least the detectable tag
+        args.tags = ["detectable"]
+    elif not args.tags:
+        # If the user didn't specify anything, search for build tools by default
+        args.tags = ["core-packages", "build-tools"]

-    # Construct the list of possible packages to be detected
-    pkg_cls_to_check = []
-
-    # Add the packages that have been required explicitly
-    if args.packages:
-        pkg_cls_to_check = [spack.repo.PATH.get_pkg_class(pkg) for pkg in args.packages]
-        if args.tags:
-            allowed = set(spack.repo.PATH.packages_with_tags(*args.tags))
-            pkg_cls_to_check = [x for x in pkg_cls_to_check if x.name in allowed]
-
-    if args.tags and not pkg_cls_to_check:
-        # If we arrived here we didn't have any explicit package passed
-        # as argument, which means to search all packages.
-        # Since tags are cached it's much faster to construct what we need
-        # to search directly, rather than filtering after the fact
-        pkg_cls_to_check = [
-            spack.repo.PATH.get_pkg_class(pkg_name)
-            for tag in args.tags
-            for pkg_name in spack.repo.PATH.packages_with_tags(tag)
-        ]
-        pkg_cls_to_check = list(set(pkg_cls_to_check))
-
-    # If the list of packages is empty, search for every possible package
-    if not args.tags and not pkg_cls_to_check:
-        pkg_cls_to_check = list(spack.repo.PATH.all_package_classes())
-
-    # If the user specified any packages to exclude from external find, add them here
-    if args.exclude:
-        pkg_cls_to_check = [pkg for pkg in pkg_cls_to_check if pkg.name not in args.exclude]
-
-    detected_packages = spack.detection.by_executable(pkg_cls_to_check, path_hints=args.path)
-    detected_packages.update(spack.detection.by_library(pkg_cls_to_check, path_hints=args.path))
+    candidate_packages = packages_to_search_for(
+        names=args.packages, tags=args.tags, exclude=args.exclude
+    )
+    detected_packages = spack.detection.by_path(
+        candidate_packages, path_hints=args.path, max_workers=args.jobs
+    )

     new_entries = spack.detection.update_configuration(
         detected_packages, scope=args.scope, buildable=not args.not_buildable
@@ -173,6 +151,19 @@ def external_find(args):
     tty.msg("No new external packages detected")


+def packages_to_search_for(
+    *, names: Optional[List[str]], tags: List[str], exclude: Optional[List[str]]
+):
+    result = []
+    for current_tag in tags:
+        result.extend(spack.repo.PATH.packages_with_tags(current_tag))
+    if names:
+        result = [x for x in result if x in names]
+    if exclude:
+        result = [x for x in result if x not in exclude]
+    return result


 def external_read_cray_manifest(args):
     _collect_and_consume_cray_manifest_files(
         manifest_file=args.file,

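
A worked example of the new helper above (package names are illustrative): tags select the candidates, then names and exclude filter them:

candidates = packages_to_search_for(names=None, tags=["build-tools"], exclude=["git"])
# e.g. ["cmake", "bison", "gawk", ...] with "git" filtered out
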
@@ -64,11 +64,11 @@ def section_title(s):


 def version(s):
-    return spack.spec.version_color + s + plain_format
+    return spack.spec.VERSION_COLOR + s + plain_format


 def variant(s):
-    return spack.spec.enabled_variant_color + s + plain_format
+    return spack.spec.ENABLED_VARIANT_COLOR + s + plain_format


 class VariantFormatter:

@@ -52,6 +52,13 @@ def setup_parser(subparser):
         const="bat",
         help="print bat commands to load the package",
     )
+    shells.add_argument(
+        "--pwsh",
+        action="store_const",
+        dest="shell",
+        const="pwsh",
+        help="print pwsh commands to load the package",
+    )

     subparser.add_argument(
         "--first",

@@ -443,7 +443,7 @@ def mirror_create(args):
     )

     # When no directory is provided, the source dir is used
-    path = args.directory or spack.caches.FETCH_CACHE_location()
+    path = args.directory or spack.caches.fetch_cache_location()

     if args.all and not ev.active_environment():
         create_mirror_for_all_specs(

@@ -137,7 +137,7 @@ def solve(parser, args):
     # these are the same options as `spack spec`
     install_status_fn = spack.spec.Spec.install_status

-    fmt = spack.spec.display_format
+    fmt = spack.spec.DISPLAY_FORMAT
     if args.namespaces:
         fmt = "{namespace}." + fmt


@@ -77,7 +77,7 @@ def setup_parser(subparser):
 def spec(parser, args):
     install_status_fn = spack.spec.Spec.install_status

-    fmt = spack.spec.display_format
+    fmt = spack.spec.DISPLAY_FORMAT
     if args.namespaces:
         fmt = "{namespace}." + fmt


@@ -51,6 +51,13 @@ def setup_parser(subparser):
         const="bat",
         help="print bat commands to load the package",
     )
+    shells.add_argument(
+        "--pwsh",
+        action="store_const",
+        dest="shell",
+        const="pwsh",
+        help="print pwsh commands to load the package",
+    )

     subparser.add_argument(
         "-a", "--all", action="store_true", help="unload all loaded Spack packages"

@@ -99,6 +99,28 @@ def cxx17_flag(self):
         else:
             return "-std=c++17"

+    @property
+    def cxx20_flag(self):
+        if self.real_version < Version("8.0"):
+            raise spack.compiler.UnsupportedCompilerFlag(
+                self, "the C++20 standard", "cxx20_flag", "< 8.0"
+            )
+        elif self.real_version < Version("11.0"):
+            return "-std=c++2a"
+        else:
+            return "-std=c++20"
+
+    @property
+    def cxx23_flag(self):
+        if self.real_version < Version("11.0"):
+            raise spack.compiler.UnsupportedCompilerFlag(
+                self, "the C++23 standard", "cxx23_flag", "< 11.0"
+            )
+        elif self.real_version < Version("14.0"):
+            return "-std=c++2b"
+        else:
+            return "-std=c++23"
+
     @property
     def c99_flag(self):
         if self.real_version < Version("4.5"):

@@ -29,6 +29,90 @@
 }


+class CmdCall:
+    """Compose a call to `cmd` for an ordered series of cmd commands/scripts"""
+
+    def __init__(self, *cmds):
+        if not cmds:
+            raise RuntimeError(
+                """Attempting to run commands from CMD without specifying commands.
+                Please add commands to be run."""
+            )
+        self._cmds = cmds
+
+    def __call__(self):
+        out = subprocess.check_output(self.cmd_line, stderr=subprocess.STDOUT)  # novermin
+        return out.decode("utf-16le", errors="replace")  # novermin
+
+    @property
+    def cmd_line(self):
+        base_call = "cmd /u /c "
+        commands = " && ".join([x.command_str() for x in self._cmds])
+        # If multiple commands are being invoked by a single subshell
+        # they must be encapsulated by a double quote. Always double
+        # quote to be sure of proper handling
+        # cmd will properly resolve nested double quotes as needed
+        #
+        # `set` writes out the active env to the subshell stdout,
+        # and in this context we are always trying to obtain env
+        # state so it should always be appended
+        return base_call + f'"{commands} && set"'
+
+
+class VarsInvocation:
+    def __init__(self, script):
+        self._script = script
+
+    def command_str(self):
+        return f'"{self._script}"'
+
+    @property
+    def script(self):
+        return self._script
+
+
+class VCVarsInvocation(VarsInvocation):
+    def __init__(self, script, arch, msvc_version):
+        super(VCVarsInvocation, self).__init__(script)
+        self._arch = arch
+        self._msvc_version = msvc_version
+
+    @property
+    def sdk_ver(self):
+        """Accessor for Windows SDK version property
+
+        Note: This property may not be set by
+        the calling context and as such this property will
+        return an empty string
+
+        This property will ONLY be set if the SDK package
+        is a dependency somewhere in the Spack DAG of the package
+        for which we are constructing an MSVC compiler env.
+        Otherwise this property should be unset to allow the VCVARS
+        script to use its internal heuristics to determine appropriate
+        SDK version
+        """
+        if getattr(self, "_sdk_ver", None):
+            return self._sdk_ver + ".0"
+        return ""
+
+    @sdk_ver.setter
+    def sdk_ver(self, val):
+        self._sdk_ver = val
+
+    @property
+    def arch(self):
+        return self._arch
+
+    @property
+    def vcvars_ver(self):
+        return f"-vcvars_ver={self._msvc_version}"
+
+    def command_str(self):
+        script = super(VCVarsInvocation, self).command_str()
+        return f"{script} {self.arch} {self.sdk_ver} {self.vcvars_ver}"
+
+
 def get_valid_fortran_pth(comp_ver):
     cl_ver = str(comp_ver)
     sort_fn = lambda fc_ver: StrictVersion(fc_ver)
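
A hypothetical composition using the classes above: two vars scripts plus the trailing `set` folded into a single cmd invocation (script paths are invented):

calls = [
    VarsInvocation(r"C:\oneapi\compiler\2023.1\env\vars.bat"),
    VarsInvocation(r"C:\oneapi\setvars.bat"),
]
print(CmdCall(*calls).cmd_line)
# cmd /u /c ""C:\oneapi\compiler\2023.1\env\vars.bat" && "C:\oneapi\setvars.bat" && set"
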
@@ -75,22 +159,48 @@ class Msvc(Compiler):
     # file based on compiler executable path.

     def __init__(self, *args, **kwargs):
-        new_pth = [pth if pth else get_valid_fortran_pth(args[0].version) for pth in args[3]]
-        args[3][:] = new_pth
+        # This positional argument "paths" is later parsed and processed by the base class
+        # via the call to `super` later in this method
+        paths = args[3]
+        # This positional argument "cspec" is also parsed and handled by the base class
+        # constructor
+        cspec = args[0]
+        new_pth = [pth if pth else get_valid_fortran_pth(cspec.version) for pth in paths]
+        paths[:] = new_pth
         super().__init__(*args, **kwargs)
-        if os.getenv("ONEAPI_ROOT"):
+        # To use the MSVC compilers, VCVARS must be invoked
+        # VCVARS is located at a fixed location, referencable
+        # idiomatically by the following relative path from the
+        # compiler.
+        # Spack first finds the compilers via VSWHERE
+        # and stores their path, but their respective VCVARS
+        # file must be invoked before usage.
+        env_cmds = []
+        compiler_root = os.path.join(self.cc, "../../../../../../..")
+        vcvars_script_path = os.path.join(compiler_root, "Auxiliary", "Build", "vcvars64.bat")
+        # get current platform architecture and format for vcvars argument
+        arch = spack.platforms.real_host().default.lower()
+        arch = arch.replace("-", "_")
+        self.vcvars_call = VCVarsInvocation(vcvars_script_path, arch, self.msvc_version)
+        env_cmds.append(self.vcvars_call)
+        # Below is a check for a valid fortran path
+        # paths has c, cxx, fc, and f77 paths in that order
+        # paths[2] refers to the fc path and is a generic check
+        # for a fortran compiler
+        if paths[2]:
-            # If this is found, it sets all the vars
-            self.setvarsfile = os.path.join(os.getenv("ONEAPI_ROOT"), "setvars.bat")
-        else:
-            # To use the MSVC compilers, VCVARS must be invoked
-            # VCVARS is located at a fixed location, referencable
-            # idiomatically by the following relative path from the
-            # compiler.
-            # Spack first finds the compilers via VSWHERE
-            # and stores their path, but their respective VCVARS
-            # file must be invoked before usage.
-            self.setvarsfile = os.path.abspath(os.path.join(self.cc, "../../../../../../.."))
-            self.setvarsfile = os.path.join(self.setvarsfile, "Auxiliary", "Build", "vcvars64.bat")
+            oneapi_root = os.getenv("ONEAPI_ROOT")
+            oneapi_root_setvars = os.path.join(oneapi_root, "setvars.bat")
+            oneapi_version_setvars = os.path.join(
+                oneapi_root, "compiler", str(self.ifx_version), "env", "vars.bat"
+            )
+            # order matters here, the specific version env must be invoked first,
+            # otherwise it will be ignored if the root setvars sets up the oneapi
+            # env first
+            env_cmds.extend(
+                [VarsInvocation(oneapi_version_setvars), VarsInvocation(oneapi_root_setvars)]
+            )
+        self.msvc_compiler_environment = CmdCall(*env_cmds)

     @property
     def msvc_version(self):
@@ -119,16 +229,30 @@ def platform_toolset_ver(self):
         """
         return self.msvc_version[:2].joined.string[:3]

-    @property
-    def cl_version(self):
-        """Cl toolset version"""
+    def _compiler_version(self, compiler):
+        """Returns version object for given compiler"""
+        # ignore_errors below is true here due to ifx's
+        # non-zero return code if it is not provided
+        # an input file
         return Version(
             re.search(
                 Msvc.version_regex,
-                spack.compiler.get_compiler_version_output(self.cc, version_arg=None),
+                spack.compiler.get_compiler_version_output(
+                    compiler, version_arg=None, ignore_errors=True
+                ),
             ).group(1)
         )

+    @property
+    def cl_version(self):
+        """Cl toolset version"""
+        return self._compiler_version(self.cc)
+
+    @property
+    def ifx_version(self):
+        """Ifx compiler version associated with this version of MSVC"""
+        return self._compiler_version(self.fc)
+
     @property
     def vs_root(self):
         # The MSVC install root is located at a fixed level above the compiler
@@ -146,27 +270,12 @@ def setup_custom_environment(self, pkg, env):
         # output, sort into dictionary, use that to make the build
         # environment.

-        # get current platform architecture and format for vcvars argument
-        arch = spack.platforms.real_host().default.lower()
-        arch = arch.replace("-", "_")
-        # vcvars can target specific sdk versions, force it to pick up concretized sdk
-        # version, if needed by spec
-        sdk_ver = (
-            ""
-            if "win-sdk" not in pkg.spec or pkg.name == "win-sdk"
-            else pkg.spec["win-sdk"].version.string + ".0"
-        )
-        # provide vcvars with msvc version selected by concretization,
-        # not whatever it happens to pick up on the system (highest available version)
-        out = subprocess.check_output(  # novermin
-            'cmd /u /c "{}" {} {} {} && set'.format(
-                self.setvarsfile, arch, sdk_ver, "-vcvars_ver=%s" % self.msvc_version
-            ),
-            stderr=subprocess.STDOUT,
-        )
-        if sys.version_info[0] >= 3:
-            out = out.decode("utf-16le", errors="replace")  # novermin
+        if pkg.name != "win-sdk" and "win-sdk" in pkg.spec:
+            self.vcvars_call.sdk_ver = pkg.spec["win-sdk"].version.string

+        out = self.msvc_compiler_environment()
         int_env = dict(
             (key, value)
             for key, _, value in (line.partition("=") for line in out.splitlines())

@@ -857,12 +857,12 @@ def add_from_file(filename, scope=None):
 def add(fullpath, scope=None):
     """Add the given configuration to the specified config scope.
     Add accepts a path. If you want to add from a filename, use add_from_file"""
-
     components = process_config_path(fullpath)

     has_existing_value = True
     path = ""
     override = False
+    value = syaml.load_config(components[-1])
     for idx, name in enumerate(components[:-1]):
         # First handle double colons in constructing path
         colon = "::" if override else ":" if path else ""
@@ -883,14 +883,14 @@ def add(fullpath, scope=None):
             existing = get_valid_type(path)

             # construct value from this point down
-            value = syaml.load_config(components[-1])
             for component in reversed(components[idx + 1 : -1]):
                 value = {component: value}
             break

     if override:
         path += "::"

     if has_existing_value:
-        path, _, value = fullpath.rpartition(":")
-        value = syaml.load_config(value)
         existing = get(path, scope=scope)

         # append values to lists
@@ -1231,11 +1231,17 @@ def they_are(t):
     return copy.copy(source)


-#
-# Process a path argument to config.set() that may contain overrides ('::' or
-# trailing ':')
-#
 def process_config_path(path):
+    """Process a path argument to config.set() that may contain overrides ('::' or
+    trailing ':')
+
+    Note: quoted value path components will be processed as a single value (escaping colons)
+        quoted path components outside of the value will be considered ill formed and will
+        raise.
+        e.g. `this:is:a:path:'value:with:colon'` will yield:
+
+            [this, is, a, path, value:with:colon]
+    """
     result = []
     if path.startswith(":"):
         raise syaml.SpackYAMLError("Illegal leading `:' in path `{0}'".format(path), "")
@@ -1263,6 +1269,17 @@ def process_config_path(path):
         front.append = True

         result.append(front)

+        quote = "['\"]"
+        not_quote = "[^'\"]"
+
+        if re.match(f"^{quote}", path):
+            m = re.match(rf"^({quote}{not_quote}+{quote})$", path)
+            if not m:
+                raise ValueError("Quotes indicate value, but there are additional path entries")
+            result.append(m.group(1))
+            break
+
     return result

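
Following the docstring's own example, a quick check of the quoted-value behavior added above:

print(process_config_path("this:is:a:path:'value:with:colon'"))
# per the docstring: [this, is, a, path, value:with:colon]
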
@@ -3,12 +3,11 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 from .common import DetectedPackage, executable_prefix, update_configuration
-from .path import by_executable, by_library, executables_in_path
+from .path import by_path, executables_in_path

 __all__ = [
     "DetectedPackage",
-    "by_library",
-    "by_executable",
+    "by_path",
     "executables_in_path",
     "executable_prefix",
     "update_configuration",

@@ -13,13 +13,13 @@
|
||||
The module also contains other functions that might be useful across different
|
||||
detection mechanisms.
|
||||
"""
|
||||
import collections
|
||||
import glob
|
||||
import itertools
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
import sys
|
||||
from typing import Dict, List, NamedTuple, Optional, Set, Tuple, Union
|
||||
|
||||
import llnl.util.tty
|
||||
|
||||
@@ -29,12 +29,28 @@
|
||||
import spack.util.spack_yaml
|
||||
import spack.util.windows_registry
|
||||
|
||||
#: Information on a package that has been detected
|
||||
DetectedPackage = collections.namedtuple("DetectedPackage", ["spec", "prefix"])
|
||||
|
||||
class DetectedPackage(NamedTuple):
|
||||
"""Information on a package that has been detected."""
|
||||
|
||||
#: Spec that was detected
|
||||
spec: spack.spec.Spec
|
||||
#: Prefix of the spec
|
||||
prefix: str
|
||||
|
||||
def __reduce__(self):
|
||||
return DetectedPackage.restore, (str(self.spec), self.prefix, self.spec.extra_attributes)
|
||||
|
||||
@staticmethod
|
||||
def restore(
|
||||
spec_str: str, prefix: str, extra_attributes: Optional[Dict[str, str]]
|
||||
) -> "DetectedPackage":
|
||||
spec = spack.spec.Spec.from_detection(spec_str=spec_str, extra_attributes=extra_attributes)
|
||||
return DetectedPackage(spec=spec, prefix=prefix)
|
||||
|
||||
|
||||
def _externals_in_packages_yaml():
|
||||
"""Return all the specs mentioned as externals in packages.yaml"""
|
||||
def _externals_in_packages_yaml() -> Set[spack.spec.Spec]:
|
||||
"""Returns all the specs mentioned as externals in packages.yaml"""
|
||||
packages_yaml = spack.config.get("packages")
|
||||
already_defined_specs = set()
|
||||
for pkg_name, package_configuration in packages_yaml.items():
|
||||
@@ -43,7 +59,12 @@ def _externals_in_packages_yaml():
|
||||
return already_defined_specs
|
||||
|
||||
|
||||
def _pkg_config_dict(external_pkg_entries):
|
||||
ExternalEntryType = Union[str, Dict[str, str]]
|
||||
|
||||
|
||||
def _pkg_config_dict(
|
||||
external_pkg_entries: List[DetectedPackage],
|
||||
) -> Dict[str, Union[bool, List[Dict[str, ExternalEntryType]]]]:
|
||||
"""Generate a package specific config dict according to the packages.yaml schema.
|
||||
|
||||
This does not generate the entire packages.yaml. For example, given some
|
||||
@@ -65,7 +86,10 @@ def _pkg_config_dict(external_pkg_entries):
|
||||
if not _spec_is_valid(e.spec):
|
||||
continue
|
||||
|
||||
external_items = [("spec", str(e.spec)), ("prefix", e.prefix)]
|
||||
external_items: List[Tuple[str, ExternalEntryType]] = [
|
||||
("spec", str(e.spec)),
|
||||
("prefix", e.prefix),
|
||||
]
|
||||
if e.spec.external_modules:
|
||||
external_items.append(("modules", e.spec.external_modules))
|
||||
|
||||
@@ -83,15 +107,14 @@ def _pkg_config_dict(external_pkg_entries):
|
||||
return pkg_dict
|
||||
|
||||
|
||||
def _spec_is_valid(spec):
|
||||
def _spec_is_valid(spec: spack.spec.Spec) -> bool:
|
||||
try:
|
||||
str(spec)
|
||||
except spack.error.SpackError:
|
||||
# It is assumed here that we can at least extract the package name from
|
||||
# the spec so we can look up the implementation of
|
||||
# determine_spec_details
|
||||
msg = "Constructed spec for {0} does not have a string representation"
|
||||
llnl.util.tty.warn(msg.format(spec.name))
|
||||
# It is assumed here that we can at least extract the package name from the spec so we
|
||||
# can look up the implementation of determine_spec_details
|
||||
msg = f"Constructed spec for {spec.name} does not have a string representation"
|
||||
llnl.util.tty.warn(msg)
|
||||
return False
|
||||
|
||||
try:
|
||||
@@ -106,7 +129,7 @@ def _spec_is_valid(spec):
|
||||
return True
|
||||
|
||||
|
||||
def path_to_dict(search_paths):
|
||||
def path_to_dict(search_paths: List[str]):
|
||||
"""Return dictionary[fullpath]: basename from list of paths"""
|
||||
path_to_lib = {}
|
||||
# Reverse order of search directories so that a lib in the first
|
||||
@@ -124,7 +147,7 @@ def path_to_dict(search_paths):
|
||||
return path_to_lib
|
||||
|
||||
|
||||
def is_executable(file_path):
|
||||
def is_executable(file_path: str) -> bool:
|
||||
"""Return True if the path passed as argument is that of an executable"""
|
||||
return os.path.isfile(file_path) and os.access(file_path, os.X_OK)
|
||||
|
||||
@@ -146,7 +169,7 @@ def _convert_to_iterable(single_val_or_multiple):
|
||||
return [x]
|
||||
|
||||
|
||||
def executable_prefix(executable_dir):
|
||||
def executable_prefix(executable_dir: str) -> str:
|
||||
"""Given a directory where an executable is found, guess the prefix
|
||||
(i.e. the "root" directory of that installation) and return it.
|
||||
|
||||
@@ -167,12 +190,12 @@ def executable_prefix(executable_dir):
|
||||
return os.sep.join(components[:idx])
|
||||
|
||||
|
||||
def library_prefix(library_dir):
|
||||
"""Given a directory where an library is found, guess the prefix
|
||||
def library_prefix(library_dir: str) -> str:
|
||||
"""Given a directory where a library is found, guess the prefix
|
||||
(i.e. the "root" directory of that installation) and return it.
|
||||
|
||||
Args:
|
||||
library_dir: directory where an library is found
|
||||
library_dir: directory where a library is found
|
||||
"""
|
||||
# Given a prefix where an library is found, assuming that prefix
|
||||
# contains /lib/ or /lib64/, strip off the 'lib' or 'lib64' directory
|
||||
@@ -195,13 +218,17 @@ def library_prefix(library_dir):
|
||||
return library_dir
|
||||
|
||||
|
||||
def update_configuration(detected_packages, scope=None, buildable=True):
|
||||
def update_configuration(
|
||||
detected_packages: Dict[str, List[DetectedPackage]],
|
||||
scope: Optional[str] = None,
|
||||
buildable: bool = True,
|
||||
) -> List[spack.spec.Spec]:
|
||||
"""Add the packages passed as arguments to packages.yaml
|
||||
|
||||
Args:
|
||||
detected_packages (list): list of DetectedPackage objects to be added
|
||||
scope (str): configuration scope where to add the detected packages
|
||||
buildable (bool): whether the detected packages are buildable or not
|
||||
detected_packages: list of DetectedPackage objects to be added
|
||||
scope: configuration scope where to add the detected packages
|
||||
buildable: whether the detected packages are buildable or not
|
||||
"""
|
||||
predefined_external_specs = _externals_in_packages_yaml()
|
||||
pkg_to_cfg, all_new_specs = {}, []
|
||||
@@ -209,7 +236,10 @@ def update_configuration(detected_packages, scope=None, buildable=True):
|
||||
new_entries = [e for e in entries if (e.spec not in predefined_external_specs)]
|
||||
|
||||
pkg_config = _pkg_config_dict(new_entries)
|
||||
all_new_specs.extend([spack.spec.Spec(x["spec"]) for x in pkg_config.get("externals", [])])
|
||||
external_entries = pkg_config.get("externals", [])
|
||||
assert not isinstance(external_entries, bool), "unexpected value for external entry"
|
||||
|
||||
all_new_specs.extend([spack.spec.Spec(x["spec"]) for x in external_entries])
|
||||
if buildable is False:
|
||||
pkg_config["buildable"] = False
|
||||
pkg_to_cfg[package_name] = pkg_config
|
||||
@@ -222,16 +252,19 @@ def update_configuration(detected_packages, scope=None, buildable=True):
|
||||
return all_new_specs
|
||||
|
||||
|
||||
def _windows_drive():
|
||||
"""Return Windows drive string extracted from PROGRAMFILES
|
||||
env var, which is garunteed to be defined for all logins"""
|
||||
drive = re.match(r"([a-zA-Z]:)", os.environ["PROGRAMFILES"]).group(1)
|
||||
return drive
|
||||
def _windows_drive() -> str:
|
||||
"""Return Windows drive string extracted from the PROGRAMFILES environment variable,
|
||||
which is guaranteed to be defined for all logins.
|
||||
"""
|
||||
match = re.match(r"([a-zA-Z]:)", os.environ["PROGRAMFILES"])
|
||||
if match is None:
|
||||
raise RuntimeError("cannot read the PROGRAMFILES environment variable")
|
||||
return match.group(1)
|
||||
|
||||
|
||||
class WindowsCompilerExternalPaths:
|
||||
@staticmethod
|
||||
def find_windows_compiler_root_paths():
|
||||
def find_windows_compiler_root_paths() -> List[str]:
|
||||
"""Helper for Windows compiler installation root discovery
|
||||
|
||||
At the moment simply returns location of VS install paths from VSWhere
|
||||
@@ -239,7 +272,7 @@ def find_windows_compiler_root_paths():
|
||||
return list(winOs.WindowsOs.vs_install_paths)
|
||||
|
||||
@staticmethod
|
||||
def find_windows_compiler_cmake_paths():
|
||||
def find_windows_compiler_cmake_paths() -> List[str]:
|
||||
"""Semi hard-coded search path for cmake bundled with MSVC"""
|
||||
return [
|
||||
os.path.join(
|
||||
@@ -249,7 +282,7 @@ def find_windows_compiler_cmake_paths():
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def find_windows_compiler_ninja_paths():
|
||||
def find_windows_compiler_ninja_paths() -> List[str]:
|
||||
"""Semi hard-coded search heuristic for locating ninja bundled with MSVC"""
|
||||
return [
|
||||
os.path.join(path, "Common7", "IDE", "CommonExtensions", "Microsoft", "CMake", "Ninja")
|
||||
@@ -257,7 +290,7 @@ def find_windows_compiler_ninja_paths():
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def find_windows_compiler_bundled_packages():
|
||||
def find_windows_compiler_bundled_packages() -> List[str]:
|
||||
"""Return all MSVC compiler bundled packages"""
|
||||
return (
|
||||
WindowsCompilerExternalPaths.find_windows_compiler_cmake_paths()
|
||||
@@ -266,14 +299,15 @@ def find_windows_compiler_bundled_packages():
|
||||
|
||||
|
||||
class WindowsKitExternalPaths:
|
||||
plat_major_ver = None
|
||||
if sys.platform == "win32":
|
||||
plat_major_ver = str(winOs.windows_version()[0])
|
||||
|
||||
@staticmethod
|
||||
def find_windows_kit_roots():
|
||||
def find_windows_kit_roots() -> Optional[str]:
|
||||
"""Return Windows kit root, typically %programfiles%\\Windows Kits\\10|11\\"""
|
||||
if sys.platform != "win32":
|
||||
return []
|
||||
return None
|
||||
program_files = os.environ["PROGRAMFILES(x86)"]
|
||||
kit_base = os.path.join(
|
||||
program_files, "Windows Kits", WindowsKitExternalPaths.plat_major_ver
|
||||
@@ -281,21 +315,23 @@ def find_windows_kit_roots():
|
||||
return kit_base
|
||||
|
||||
@staticmethod
|
||||
def find_windows_kit_bin_paths(kit_base=None):
|
||||
def find_windows_kit_bin_paths(kit_base: Optional[str] = None) -> List[str]:
|
||||
"""Returns Windows kit bin directory per version"""
|
||||
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
|
||||
assert kit_base is not None, "unexpected value for kit_base"
|
||||
kit_bin = os.path.join(kit_base, "bin")
|
||||
return glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\"))
|
||||
|
||||
@staticmethod
|
||||
def find_windows_kit_lib_paths(kit_base=None):
|
||||
def find_windows_kit_lib_paths(kit_base: Optional[str] = None) -> List[str]:
|
||||
"""Returns Windows kit lib directory per version"""
|
||||
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
|
||||
assert kit_base is not None, "unexpected value for kit_base"
|
||||
kit_lib = os.path.join(kit_base, "Lib")
|
||||
return glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\"))
|
||||
|
||||
@staticmethod
|
||||
def find_windows_driver_development_kit_paths():
|
||||
def find_windows_driver_development_kit_paths() -> List[str]:
|
||||
"""Provides a list of all installation paths
|
||||
for the WDK by version and architecture
|
||||
"""
|
||||
@@ -303,7 +339,7 @@ def find_windows_driver_development_kit_paths():
|
||||
return WindowsKitExternalPaths.find_windows_kit_lib_paths(wdk_content_root)
|
||||
|
||||
@staticmethod
|
||||
def find_windows_kit_reg_installed_roots_paths():
|
||||
def find_windows_kit_reg_installed_roots_paths() -> List[str]:
|
||||
reg = spack.util.windows_registry.WindowsRegistryView(
|
||||
"SOFTWARE\\Microsoft\\Windows Kits\\Installed Roots",
|
||||
root_key=spack.util.windows_registry.HKEY.HKEY_LOCAL_MACHINE,
|
||||
@@ -316,7 +352,7 @@ def find_windows_kit_reg_installed_roots_paths():
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def find_windows_kit_reg_sdk_paths():
|
||||
def find_windows_kit_reg_sdk_paths() -> List[str]:
|
||||
reg = spack.util.windows_registry.WindowsRegistryView(
|
||||
"SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows\\v%s.0"
|
||||
% WindowsKitExternalPaths.plat_major_ver,
|
||||
@@ -330,7 +366,7 @@ def find_windows_kit_reg_sdk_paths():
|
||||
)
|
||||
|
||||
|
||||
def find_win32_additional_install_paths():
|
||||
def find_win32_additional_install_paths() -> List[str]:
|
||||
"""Not all programs on Windows live on the PATH
|
||||
Return a list of other potential install locations.
|
||||
"""
|
||||
@@ -357,13 +393,12 @@ def find_win32_additional_install_paths():
|
||||
return windows_search_ext
|
||||
|
||||
|
||||
def compute_windows_program_path_for_package(pkg):
|
||||
"""Given a package, attempt to compute its Windows
|
||||
program files location, return list of best guesses
|
||||
def compute_windows_program_path_for_package(pkg: "spack.package_base.PackageBase") -> List[str]:
|
||||
"""Given a package, attempts to compute its Windows program files location,
|
||||
and returns the list of best guesses.
|
||||
|
||||
Args:
|
||||
pkg (spack.package_base.PackageBase): package for which
|
||||
Program Files location is to be computed
|
||||
pkg: package for which Program Files location is to be computed
|
||||
"""
|
||||
if sys.platform != "win32":
|
||||
return []
|
||||
@@ -378,7 +413,7 @@ def compute_windows_program_path_for_package(pkg):
|
||||
]
|
||||
|
||||
|
||||
def compute_windows_user_path_for_package(pkg):
|
||||
def compute_windows_user_path_for_package(pkg: "spack.package_base.PackageBase") -> List[str]:
|
||||
"""Given a package attempt to compute its user scoped
|
||||
install location, return list of potential locations based
|
||||
on common heuristics. For more info on Windows user specific
|
||||
|
||||
@@ -6,11 +6,13 @@
|
||||
and running executables.
|
||||
"""
|
||||
import collections
|
||||
import concurrent.futures
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
import sys
|
||||
import warnings
|
||||
from typing import Dict, List, Optional, Set, Tuple
|
||||
|
||||
import llnl.util.filesystem
|
||||
import llnl.util.tty
|
||||
@@ -18,7 +20,7 @@
|
||||
import spack.util.environment
|
||||
import spack.util.ld_so_conf
|
||||
|
||||
from .common import ( # find_windows_compiler_bundled_packages,
|
||||
from .common import (
|
||||
DetectedPackage,
|
||||
WindowsCompilerExternalPaths,
|
||||
WindowsKitExternalPaths,
|
||||
@@ -31,8 +33,13 @@
|
||||
path_to_dict,
|
||||
)
|
||||
|
||||
#: Timeout used for package detection (seconds)
|
||||
DETECTION_TIMEOUT = 60
|
||||
if sys.platform == "win32":
|
||||
DETECTION_TIMEOUT = 120
|
||||
|
||||
def common_windows_package_paths():
|
||||
|
||||
def common_windows_package_paths() -> List[str]:
|
||||
paths = WindowsCompilerExternalPaths.find_windows_compiler_bundled_packages()
|
||||
paths.extend(find_win32_additional_install_paths())
|
||||
paths.extend(WindowsKitExternalPaths.find_windows_kit_bin_paths())
|
||||
@@ -41,7 +48,7 @@ def common_windows_package_paths():
|
||||
return paths
|
||||
|
||||
|
||||
def executables_in_path(path_hints):
|
||||
def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
|
||||
"""Get the paths of all executables available from the current PATH.
|
||||
|
||||
For convenience, this is constructed as a dictionary where the keys are
|
||||
@@ -52,7 +59,7 @@ def executables_in_path(path_hints):
|
||||
assumed there are two different instances of the executable.
|
||||
|
||||
Args:
|
||||
path_hints (list): list of paths to be searched. If None the list will be
|
||||
path_hints: list of paths to be searched. If None the list will be
|
||||
constructed based on the PATH environment variable.
|
||||
"""
|
||||
if sys.platform == "win32":
|
||||
@@ -61,7 +68,9 @@ def executables_in_path(path_hints):
|
||||
return path_to_dict(search_paths)
|
||||
|
||||
|
||||
def libraries_in_ld_and_system_library_path(path_hints=None):
|
||||
def libraries_in_ld_and_system_library_path(
|
||||
path_hints: Optional[List[str]] = None,
|
||||
) -> Dict[str, str]:
|
||||
"""Get the paths of all libraries available from LD_LIBRARY_PATH,
|
||||
LIBRARY_PATH, DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and
|
||||
standard system library paths.
|
||||
@@ -74,7 +83,7 @@ def libraries_in_ld_and_system_library_path(path_hints=None):
|
||||
assumed there are two different instances of the library.
|
||||
|
||||
Args:
|
||||
path_hints (list): list of paths to be searched. If None the list will be
|
||||
path_hints: list of paths to be searched. If None the list will be
|
||||
constructed based on the set of LD_LIBRARY_PATH, LIBRARY_PATH,
|
||||
DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH environment
|
||||
variables as well as the standard system library paths.
|
||||
@@ -90,7 +99,7 @@ def libraries_in_ld_and_system_library_path(path_hints=None):
|
||||
return path_to_dict(search_paths)
|
||||
|
||||
|
||||
def libraries_in_windows_paths(path_hints):
|
||||
def libraries_in_windows_paths(path_hints: List[str]) -> Dict[str, str]:
|
||||
path_hints.extend(spack.util.environment.get_path("PATH"))
|
||||
search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
|
||||
# on Windows, some libraries (.dlls) are found in the bin directory or sometimes
|
||||
@@ -106,218 +115,250 @@ def libraries_in_windows_paths(path_hints):
|
||||
return path_to_dict(search_paths)
|
||||
|
||||
|
||||
def _group_by_prefix(paths):
|
||||
def _group_by_prefix(paths: Set[str]) -> Dict[str, Set[str]]:
|
||||
groups = collections.defaultdict(set)
|
||||
for p in paths:
|
||||
groups[os.path.dirname(p)].add(p)
|
||||
return groups.items()
|
||||
return groups
|
||||
|
||||
|
||||
# TODO consolidate this with by_executable
|
||||
# Packages should be able to define both .libraries and .executables in the future
|
||||
# determine_spec_details should get all relevant libraries and executables in one call
|
||||
def by_library(packages_to_check, path_hints=None):
|
||||
# Techniques for finding libraries is determined on a per recipe basis in
|
||||
# the determine_version class method. Some packages will extract the
|
||||
# version number from a shared libraries filename.
|
||||
# Other libraries could use the strings function to extract it as described
|
||||
# in https://unix.stackexchange.com/questions/58846/viewing-linux-library-executable-version-info
|
||||
"""Return the list of packages that have been detected on the system,
|
||||
searching by LD_LIBRARY_PATH, LIBRARY_PATH, DYLD_LIBRARY_PATH,
|
||||
DYLD_FALLBACK_LIBRARY_PATH, and standard system library paths.
|
||||
class Finder:
|
||||
"""Inspects the file-system looking for packages. Guesses places where to look using PATH."""
|
||||
|
||||
Args:
|
||||
packages_to_check (list): list of packages to be detected
|
||||
path_hints (list): list of paths to be searched. If None the list will be
|
||||
constructed based on the LD_LIBRARY_PATH, LIBRARY_PATH,
|
||||
DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH environment variables
|
||||
and standard system library paths.
|
||||
"""
|
||||
# If no path hints from command line, intialize to empty list so
|
||||
# we can add default hints on a per package basis
|
||||
path_hints = [] if path_hints is None else path_hints
|
||||
def path_hints(
|
||||
self, *, pkg: "spack.package_base.PackageBase", initial_guess: Optional[List[str]] = None
|
||||
) -> List[str]:
|
||||
"""Returns the list of paths to be searched.
|
||||
|
||||
lib_pattern_to_pkgs = collections.defaultdict(list)
|
||||
for pkg in packages_to_check:
|
||||
if hasattr(pkg, "libraries"):
|
||||
for lib in pkg.libraries:
|
||||
lib_pattern_to_pkgs[lib].append(pkg)
|
||||
path_hints.extend(compute_windows_user_path_for_package(pkg))
|
||||
path_hints.extend(compute_windows_program_path_for_package(pkg))
|
||||
Args:
|
||||
pkg: package being detected
|
||||
initial_guess: initial list of paths from caller
|
||||
"""
|
||||
result = initial_guess or []
|
||||
result.extend(compute_windows_user_path_for_package(pkg))
|
||||
result.extend(compute_windows_program_path_for_package(pkg))
|
||||
return result
|
||||
|
||||
path_to_lib_name = (
|
||||
libraries_in_ld_and_system_library_path(path_hints=path_hints)
|
||||
if sys.platform != "win32"
|
||||
else libraries_in_windows_paths(path_hints)
|
||||
)
|
||||
def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
|
||||
"""Returns the list of patterns used to match candidate files.
|
||||
|
||||
pkg_to_found_libs = collections.defaultdict(set)
|
||||
for lib_pattern, pkgs in lib_pattern_to_pkgs.items():
|
||||
compiled_re = re.compile(lib_pattern)
|
||||
for path, lib in path_to_lib_name.items():
|
||||
if compiled_re.search(lib):
|
||||
for pkg in pkgs:
|
||||
pkg_to_found_libs[pkg].add(path)
|
||||
Args:
|
||||
pkg: package being detected
|
||||
"""
|
||||
raise NotImplementedError("must be implemented by derived classes")
|
||||
|
||||
pkg_to_entries = collections.defaultdict(list)
|
||||
resolved_specs = {} # spec -> lib found for the spec
|
||||
def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
|
||||
"""Returns a list of candidate files found on the system.
|
||||
|
||||
for pkg, libs in pkg_to_found_libs.items():
|
||||
Args:
|
||||
patterns: search patterns to be used for matching files
|
||||
paths: paths where to search for files
|
||||
"""
|
||||
raise NotImplementedError("must be implemented by derived classes")
|
||||
|
||||
def prefix_from_path(self, *, path: str) -> str:
|
||||
"""Given a path where a file was found, returns the corresponding prefix.
|
||||
|
||||
Args:
|
||||
path: path of a detected file
|
||||
"""
|
||||
raise NotImplementedError("must be implemented by derived classes")
|
||||
|
||||
def detect_specs(
|
||||
self, *, pkg: "spack.package_base.PackageBase", paths: List[str]
|
||||
) -> List[DetectedPackage]:
|
||||
"""Given a list of files matching the search patterns, returns a list of detected specs.
|
||||
|
||||
Args:
|
||||
pkg: package being detected
|
||||
paths: files matching the package search patterns
|
||||
"""
|
||||
if not hasattr(pkg, "determine_spec_details"):
|
||||
llnl.util.tty.warn(
|
||||
"{0} must define 'determine_spec_details' in order"
|
||||
" for Spack to detect externally-provided instances"
|
||||
" of the package.".format(pkg.name)
|
||||
warnings.warn(
|
||||
f"{pkg.name} must define 'determine_spec_details' in order"
|
||||
f" for Spack to detect externally-provided instances"
|
||||
f" of the package."
|
||||
)
|
||||
continue
|
||||
return []
|
||||
|
||||
for prefix, libs_in_prefix in sorted(_group_by_prefix(libs)):
|
||||
try:
|
||||
specs = _convert_to_iterable(pkg.determine_spec_details(prefix, libs_in_prefix))
|
||||
except Exception as e:
|
||||
specs = []
|
||||
msg = 'error detecting "{0}" from prefix {1} [{2}]'
|
||||
warnings.warn(msg.format(pkg.name, prefix, str(e)))
|
||||
|
||||
if not specs:
|
||||
llnl.util.tty.debug(
|
||||
"The following libraries in {0} were decidedly not "
|
||||
"part of the package {1}: {2}".format(
|
||||
prefix, pkg.name, ", ".join(_convert_to_iterable(libs_in_prefix))
|
||||
)
|
||||
)
|
||||
|
||||
for spec in specs:
|
||||
pkg_prefix = library_prefix(prefix)
|
||||
|
||||
if not pkg_prefix:
|
||||
msg = "no lib/ or lib64/ dir found in {0}. Cannot "
|
||||
"add it as a Spack package"
|
||||
llnl.util.tty.debug(msg.format(prefix))
|
||||
continue
|
||||
|
||||
if spec in resolved_specs:
|
||||
prior_prefix = ", ".join(_convert_to_iterable(resolved_specs[spec]))
|
||||
|
||||
llnl.util.tty.debug(
|
||||
"Libraries in {0} and {1} are both associated"
|
||||
" with the same spec {2}".format(prefix, prior_prefix, str(spec))
|
||||
)
|
||||
continue
|
||||
else:
|
||||
resolved_specs[spec] = prefix
|
||||
|
||||
try:
|
||||
spec.validate_detection()
|
||||
except Exception as e:
|
||||
msg = (
|
||||
'"{0}" has been detected on the system but will '
|
||||
"not be added to packages.yaml [reason={1}]"
|
||||
)
|
||||
llnl.util.tty.warn(msg.format(spec, str(e)))
|
||||
continue
|
||||
|
||||
if spec.external_path:
|
||||
pkg_prefix = spec.external_path
|
||||
|
||||
pkg_to_entries[pkg.name].append(DetectedPackage(spec=spec, prefix=pkg_prefix))
|
||||
|
||||
return pkg_to_entries
|
||||
|
||||
|
||||
def by_executable(packages_to_check, path_hints=None):
|
||||
"""Return the list of packages that have been detected on the system,
|
||||
searching by path.
|
||||
|
||||
Args:
|
||||
packages_to_check (list): list of package classes to be detected
|
||||
path_hints (list): list of paths to be searched. If None the list will be
|
||||
constructed based on the PATH environment variable.
|
||||
"""
|
||||
path_hints = spack.util.environment.get_path("PATH") if path_hints is None else path_hints
|
||||
exe_pattern_to_pkgs = collections.defaultdict(list)
|
||||
for pkg in packages_to_check:
|
||||
if hasattr(pkg, "executables"):
|
||||
for exe in pkg.platform_executables():
|
||||
exe_pattern_to_pkgs[exe].append(pkg)
|
||||
# Add Windows specific, package related paths to the search paths
|
||||
path_hints.extend(compute_windows_user_path_for_package(pkg))
|
||||
path_hints.extend(compute_windows_program_path_for_package(pkg))
|
||||
|
||||
path_to_exe_name = executables_in_path(path_hints=path_hints)
|
||||
pkg_to_found_exes = collections.defaultdict(set)
|
||||
for exe_pattern, pkgs in exe_pattern_to_pkgs.items():
|
||||
compiled_re = re.compile(exe_pattern)
|
||||
for path, exe in path_to_exe_name.items():
|
||||
if compiled_re.search(exe):
|
||||
for pkg in pkgs:
|
||||
pkg_to_found_exes[pkg].add(path)
|
||||
|
||||
pkg_to_entries = collections.defaultdict(list)
|
||||
resolved_specs = {} # spec -> exe found for the spec
|
||||
|
||||
for pkg, exes in pkg_to_found_exes.items():
|
||||
if not hasattr(pkg, "determine_spec_details"):
|
||||
llnl.util.tty.warn(
|
||||
"{0} must define 'determine_spec_details' in order"
|
||||
" for Spack to detect externally-provided instances"
|
||||
" of the package.".format(pkg.name)
|
||||
)
|
||||
continue
|
||||
|
||||
for prefix, exes_in_prefix in sorted(_group_by_prefix(exes)):
|
||||
result = []
|
||||
for candidate_path, items_in_prefix in sorted(_group_by_prefix(set(paths)).items()):
|
||||
# TODO: multiple instances of a package can live in the same
|
||||
# prefix, and a package implementation can return multiple specs
|
||||
# for one prefix, but without additional details (e.g. about the
|
||||
# naming scheme which differentiates them), the spec won't be
|
||||
# usable.
|
||||
try:
|
||||
specs = _convert_to_iterable(pkg.determine_spec_details(prefix, exes_in_prefix))
|
||||
specs = _convert_to_iterable(
|
||||
pkg.determine_spec_details(candidate_path, items_in_prefix)
|
||||
)
|
||||
except Exception as e:
|
||||
specs = []
|
||||
msg = 'error detecting "{0}" from prefix {1} [{2}]'
|
||||
warnings.warn(msg.format(pkg.name, prefix, str(e)))
|
||||
|
||||
if not specs:
|
||||
llnl.util.tty.debug(
|
||||
"The following executables in {0} were decidedly not "
|
||||
"part of the package {1}: {2}".format(
|
||||
prefix, pkg.name, ", ".join(_convert_to_iterable(exes_in_prefix))
|
||||
)
|
||||
warnings.warn(
|
||||
f'error detecting "{pkg.name}" from prefix {candidate_path} [{str(e)}]'
|
||||
)
|
||||
|
||||
for spec in specs:
|
||||
pkg_prefix = executable_prefix(prefix)
|
||||
if not specs:
|
||||
files = ", ".join(_convert_to_iterable(items_in_prefix))
|
||||
llnl.util.tty.debug(
|
||||
f"The following files in {candidate_path} were decidedly not "
|
||||
f"part of the package {pkg.name}: {files}"
|
||||
)
|
||||
|
||||
if not pkg_prefix:
|
||||
msg = "no bin/ dir found in {0}. Cannot add it as a Spack package"
|
||||
llnl.util.tty.debug(msg.format(prefix))
|
||||
resolved_specs: Dict[spack.spec.Spec, str] = {} # spec -> exe found for the spec
|
||||
for spec in specs:
|
||||
prefix = self.prefix_from_path(path=candidate_path)
|
||||
if not prefix:
|
||||
continue
|
||||
|
||||
if spec in resolved_specs:
|
||||
prior_prefix = ", ".join(_convert_to_iterable(resolved_specs[spec]))
|
||||
|
||||
llnl.util.tty.debug(
|
||||
"Executables in {0} and {1} are both associated"
|
||||
" with the same spec {2}".format(prefix, prior_prefix, str(spec))
|
||||
f"Files in {candidate_path} and {prior_prefix} are both associated"
|
||||
f" with the same spec {str(spec)}"
|
||||
)
|
||||
continue
|
||||
else:
|
||||
resolved_specs[spec] = prefix
|
||||
|
||||
resolved_specs[spec] = candidate_path
|
||||
try:
|
||||
spec.validate_detection()
|
||||
except Exception as e:
|
||||
msg = (
|
||||
'"{0}" has been detected on the system but will '
|
||||
"not be added to packages.yaml [reason={1}]"
|
||||
f'"{spec}" has been detected on the system but will '
|
||||
f"not be added to packages.yaml [reason={str(e)}]"
|
||||
)
|
||||
llnl.util.tty.warn(msg.format(spec, str(e)))
|
||||
warnings.warn(msg)
|
||||
continue
|
||||
|
||||
if spec.external_path:
|
||||
pkg_prefix = spec.external_path
|
||||
prefix = spec.external_path
|
||||
|
||||
pkg_to_entries[pkg.name].append(DetectedPackage(spec=spec, prefix=pkg_prefix))
|
||||
result.append(DetectedPackage(spec=spec, prefix=prefix))
|
||||
|
||||
return pkg_to_entries
|
||||
return result
|
||||
|
||||
def find(
|
||||
self, *, pkg_name: str, initial_guess: Optional[List[str]] = None
|
||||
) -> List[DetectedPackage]:
|
||||
"""For a given package, returns a list of detected specs.
|
||||
|
||||
Args:
|
||||
pkg_name: package being detected
|
||||
initial_guess: initial list of paths to search from the caller
|
||||
"""
|
||||
import spack.repo
|
||||
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
patterns = self.search_patterns(pkg=pkg_cls)
|
||||
if not patterns:
|
||||
return []
|
||||
path_hints = self.path_hints(pkg=pkg_cls, initial_guess=initial_guess)
|
||||
candidates = self.candidate_files(patterns=patterns, paths=path_hints)
|
||||
result = self.detect_specs(pkg=pkg_cls, paths=candidates)
|
||||
return result
|
||||
|
||||
|
||||
class ExecutablesFinder(Finder):
|
||||
def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
|
||||
result = []
|
||||
if hasattr(pkg, "executables") and hasattr(pkg, "platform_executables"):
|
||||
result = pkg.platform_executables()
|
||||
return result
|
||||
|
||||
def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
|
||||
executables_by_path = executables_in_path(path_hints=paths)
|
||||
patterns = [re.compile(x) for x in patterns]
|
||||
result = []
|
||||
for compiled_re in patterns:
|
||||
for path, exe in executables_by_path.items():
|
||||
if compiled_re.search(exe):
|
||||
result.append(path)
|
||||
return list(sorted(set(result)))
|
||||
|
||||
def prefix_from_path(self, *, path: str) -> str:
|
||||
result = executable_prefix(path)
|
||||
if not result:
|
||||
msg = f"no bin/ dir found in {path}. Cannot add it as a Spack package"
|
||||
llnl.util.tty.debug(msg)
|
||||
return result
|
||||
|
||||
|
||||
class LibrariesFinder(Finder):
|
||||
"""Finds libraries on the system, searching by LD_LIBRARY_PATH, LIBRARY_PATH,
|
||||
DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and standard system library paths
|
||||
"""
|
||||
|
||||
def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
|
||||
result = []
|
||||
if hasattr(pkg, "libraries"):
|
||||
result = pkg.libraries
|
||||
return result
|
||||
|
||||
def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
|
||||
libraries_by_path = (
|
||||
libraries_in_ld_and_system_library_path(path_hints=paths)
|
||||
if sys.platform != "win32"
|
||||
else libraries_in_windows_paths(paths)
|
||||
)
|
||||
patterns = [re.compile(x) for x in patterns]
|
||||
result = []
|
||||
for compiled_re in patterns:
|
||||
for path, exe in libraries_by_path.items():
|
||||
if compiled_re.search(exe):
|
||||
result.append(path)
|
||||
return result
|
||||
|
||||
def prefix_from_path(self, *, path: str) -> str:
|
||||
result = library_prefix(path)
|
||||
if not result:
|
||||
msg = f"no lib/ or lib64/ dir found in {path}. Cannot add it as a Spack package"
|
||||
llnl.util.tty.debug(msg)
|
||||
return result
|
||||
|
||||
|
||||
def by_path(
|
||||
packages_to_search: List[str],
|
||||
*,
|
||||
path_hints: Optional[List[str]] = None,
|
||||
max_workers: Optional[int] = None,
|
||||
) -> Dict[str, List[DetectedPackage]]:
|
||||
"""Return the list of packages that have been detected on the system,
|
||||
searching by path.
|
||||
|
||||
Args:
|
||||
packages_to_search: list of package classes to be detected
|
||||
path_hints: initial list of paths to be searched
|
||||
"""
|
||||
# TODO: Packages should be able to define both .libraries and .executables in the future
|
||||
# TODO: determine_spec_details should get all relevant libraries and executables in one call
|
||||
executables_finder, libraries_finder = ExecutablesFinder(), LibrariesFinder()
|
||||
|
||||
executables_path_guess = (
|
||||
spack.util.environment.get_path("PATH") if path_hints is None else path_hints
|
||||
)
|
||||
libraries_path_guess = [] if path_hints is None else path_hints
|
||||
detected_specs_by_package: Dict[str, Tuple[concurrent.futures.Future, ...]] = {}
|
||||
|
||||
result = collections.defaultdict(list)
|
||||
with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
|
||||
for pkg in packages_to_search:
|
||||
executable_future = executor.submit(
|
||||
executables_finder.find, pkg_name=pkg, initial_guess=executables_path_guess
|
||||
)
|
||||
library_future = executor.submit(
|
||||
libraries_finder.find, pkg_name=pkg, initial_guess=libraries_path_guess
|
||||
)
|
||||
detected_specs_by_package[pkg] = executable_future, library_future
|
||||
|
||||
for pkg_name, futures in detected_specs_by_package.items():
|
||||
for future in futures:
|
||||
try:
|
||||
detected = future.result(timeout=DETECTION_TIMEOUT)
|
||||
if detected:
|
||||
result[pkg_name].extend(detected)
|
||||
except Exception:
|
||||
llnl.util.tty.debug(
|
||||
f"[EXTERNAL DETECTION] Skipping {pkg_name}: timeout reached"
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
@@ -42,6 +42,7 @@ class OpenMpi(Package):
|
||||
import spack.patch
|
||||
import spack.spec
|
||||
import spack.url
|
||||
import spack.util.crypto
|
||||
import spack.variant
|
||||
from spack.dependency import Dependency, canonical_deptype, default_deptype
|
||||
from spack.fetch_strategy import from_kwargs
|
||||
@@ -407,10 +408,7 @@ def version(
|
||||
|
||||
def _execute_version(pkg, ver, **kwargs):
|
||||
if (
|
||||
any(
|
||||
s in kwargs
|
||||
for s in ("sha256", "sha384", "sha512", "md5", "sha1", "sha224", "checksum")
|
||||
)
|
||||
(any(s in kwargs for s in spack.util.crypto.hashes) or "checksum" in kwargs)
|
||||
and hasattr(pkg, "has_code")
|
||||
and not pkg.has_code
|
||||
):
|
||||
@@ -760,7 +758,7 @@ def _execute_variant(pkg):
|
||||
when_spec = make_when_spec(when)
|
||||
when_specs = [when_spec]
|
||||
|
||||
if not re.match(spack.spec.identifier_re, name):
|
||||
if not re.match(spack.spec.IDENTIFIER_RE, name):
|
||||
directive = "variant"
|
||||
msg = "Invalid variant name in {0}: '{1}'"
|
||||
raise DirectiveError(directive, msg.format(pkg.name, name))
|
||||
|
||||
@@ -1504,7 +1504,7 @@ def _concretize_separately(self, tests=False):
|
||||
start = time.time()
|
||||
max_processes = min(
|
||||
len(arguments), # Number of specs
|
||||
spack.config.get("config:build_jobs"), # Cap on build jobs
|
||||
spack.util.cpus.determine_number_of_jobs(parallel=True),
|
||||
)
|
||||
|
||||
# TODO: revisit this print as soon as darwin is parallel too
|
||||
@@ -1994,14 +1994,10 @@ def get_one_by_hash(self, dag_hash):
|
||||
|
||||
def all_matching_specs(self, *specs: spack.spec.Spec) -> List[Spec]:
|
||||
"""Returns all concretized specs in the environment satisfying any of the input specs"""
|
||||
# Look up abstract hashes ahead of time, to avoid O(n^2) traversal.
|
||||
specs = [s.lookup_hash() for s in specs]
|
||||
|
||||
# Avoid double lookup by directly calling _satisfies.
|
||||
return [
|
||||
s
|
||||
for s in traverse.traverse_nodes(self.concrete_roots(), key=traverse.by_dag_hash)
|
||||
if any(s._satisfies(t) for t in specs)
|
||||
if any(s.satisfies(t) for t in specs)
|
||||
]
|
||||
|
||||
@spack.repo.autospec
|
||||
@@ -2062,7 +2058,7 @@ def matching_spec(self, spec):
|
||||
# If multiple root specs match, it is assumed that the abstract
|
||||
# spec will most-succinctly summarize the difference between them
|
||||
# (and the user can enter one of these to disambiguate)
|
||||
fmt_str = "{hash:7} " + spack.spec.default_format
|
||||
fmt_str = "{hash:7} " + spack.spec.DEFAULT_FORMAT
|
||||
color = clr.get_color_when()
|
||||
match_strings = [
|
||||
f"Root spec {abstract.format(color=color)}\n {concrete.format(fmt_str, color=color)}"
|
||||
@@ -2370,7 +2366,7 @@ def display_specs(concretized_specs):
|
||||
def _tree_to_display(spec):
|
||||
return spec.tree(
|
||||
recurse_dependencies=True,
|
||||
format=spack.spec.display_format,
|
||||
format=spack.spec.DISPLAY_FORMAT,
|
||||
status_fn=spack.spec.Spec.install_status,
|
||||
hashlen=7,
|
||||
hashes=True,
|
||||
@@ -2668,6 +2664,26 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str]) -> None:
|
||||
self.yaml_content = with_defaults_added
|
||||
self.changed = False
|
||||
|
||||
def _all_matches(self, user_spec: str) -> List[str]:
|
||||
"""Maps the input string to the first equivalent user spec in the manifest,
|
||||
and returns it.
|
||||
|
||||
Args:
|
||||
user_spec: user spec to be found
|
||||
|
||||
Raises:
|
||||
ValueError: if no equivalent match is found
|
||||
"""
|
||||
result = []
|
||||
for yaml_spec_str in self.pristine_configuration["specs"]:
|
||||
if Spec(yaml_spec_str) == Spec(user_spec):
|
||||
result.append(yaml_spec_str)
|
||||
|
||||
if not result:
|
||||
raise ValueError(f"cannot find a spec equivalent to {user_spec}")
|
||||
|
||||
return result
|
||||
|
||||
def add_user_spec(self, user_spec: str) -> None:
|
||||
"""Appends the user spec passed as input to the list of root specs.
|
||||
|
||||
@@ -2688,8 +2704,9 @@ def remove_user_spec(self, user_spec: str) -> None:
|
||||
SpackEnvironmentError: when the user spec is not in the list
|
||||
"""
|
||||
try:
|
||||
self.pristine_configuration["specs"].remove(user_spec)
|
||||
self.configuration["specs"].remove(user_spec)
|
||||
for key in self._all_matches(user_spec):
|
||||
self.pristine_configuration["specs"].remove(key)
|
||||
self.configuration["specs"].remove(key)
|
||||
except ValueError as e:
|
||||
msg = f"cannot remove {user_spec} from {self}, no such spec exists"
|
||||
raise SpackEnvironmentError(msg) from e
|
||||
|
||||
@@ -43,7 +43,7 @@ def activate_header(env, shell, prompt=None):
|
||||
# TODO: despacktivate
|
||||
# TODO: prompt
|
||||
elif shell == "pwsh":
|
||||
cmds += "$Env:SPACK_ENV=%s\n" % env.path
|
||||
cmds += "$Env:SPACK_ENV='%s'\n" % env.path
|
||||
else:
|
||||
if "color" in os.getenv("TERM", "") and prompt:
|
||||
prompt = colorize("@G{%s}" % prompt, color=True, enclose=True)
|
||||
@@ -82,7 +82,7 @@ def deactivate_header(shell):
|
||||
# TODO: despacktivate
|
||||
# TODO: prompt
|
||||
elif shell == "pwsh":
|
||||
cmds += "Remove-Item Env:SPACK_ENV"
|
||||
cmds += "Set-Item -Path Env:SPACK_ENV\n"
|
||||
else:
|
||||
cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n"
|
||||
cmds += "unset SPACK_ENV; export SPACK_ENV;\n"
|
||||
|
||||
@@ -590,9 +590,9 @@ def print_status(self, *specs, **kwargs):
|
||||
print()
|
||||
|
||||
header = "%s{%s} / %s{%s}" % (
|
||||
spack.spec.architecture_color,
|
||||
spack.spec.ARCHITECTURE_COLOR,
|
||||
architecture,
|
||||
spack.spec.compiler_color,
|
||||
spack.spec.COMPILER_COLOR,
|
||||
compiler,
|
||||
)
|
||||
tty.hline(colorize(header), char="-")
|
||||
|
||||
@@ -90,6 +90,16 @@
|
||||
STATUS_REMOVED = "removed"
|
||||
|
||||
|
||||
def _write_timer_json(pkg, timer, cache):
|
||||
extra_attributes = {"name": pkg.name, "cache": cache, "hash": pkg.spec.dag_hash()}
|
||||
try:
|
||||
with open(pkg.times_log_path, "w") as timelog:
|
||||
timer.write_json(timelog, extra_attributes=extra_attributes)
|
||||
except Exception as e:
|
||||
tty.debug(str(e))
|
||||
return
|
||||
|
||||
|
||||
class InstallAction:
|
||||
#: Don't perform an install
|
||||
NONE = 0
|
||||
@@ -399,6 +409,8 @@ def _install_from_cache(
|
||||
return False
|
||||
t.stop()
|
||||
tty.debug("Successfully extracted {0} from binary cache".format(pkg_id))
|
||||
|
||||
_write_timer_json(pkg, t, True)
|
||||
_print_timer(pre=_log_prefix(pkg.name), pkg_id=pkg_id, timer=t)
|
||||
_print_installed_pkg(pkg.spec.prefix)
|
||||
spack.hooks.post_install(pkg.spec, explicit)
|
||||
@@ -481,7 +493,7 @@ def _process_binary_cache_tarball(
|
||||
|
||||
with timer.measure("install"), spack.util.path.filter_padding():
|
||||
binary_distribution.extract_tarball(
|
||||
pkg.spec, download_result, unsigned=unsigned, force=False
|
||||
pkg.spec, download_result, unsigned=unsigned, force=False, timer=timer
|
||||
)
|
||||
|
||||
pkg.installed_from_binary_cache = True
|
||||
@@ -592,7 +604,9 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
|
||||
if node is spec:
|
||||
spack.repo.PATH.dump_provenance(node, dest_pkg_dir)
|
||||
elif source_pkg_dir:
|
||||
fs.install_tree(source_pkg_dir, dest_pkg_dir)
|
||||
fs.install_tree(
|
||||
source_pkg_dir, dest_pkg_dir, allow_broken_symlinks=(sys.platform != "win32")
|
||||
)
|
||||
|
||||
|
||||
def get_dependent_ids(spec: "spack.spec.Spec") -> List[str]:
|
||||
@@ -1316,7 +1330,6 @@ def _prepare_for_install(self, task: BuildTask) -> None:
|
||||
"""
|
||||
Check the database and leftover installation directories/files and
|
||||
prepare for a new install attempt for an uninstalled package.
|
||||
|
||||
Preparation includes cleaning up installation and stage directories
|
||||
and ensuring the database is up-to-date.
|
||||
|
||||
@@ -2092,7 +2105,6 @@ def install(self) -> None:
|
||||
# another process has a write lock so must be (un)installing
|
||||
# the spec (or that process is hung).
|
||||
ltype, lock = self._ensure_locked("read", pkg)
|
||||
|
||||
# Requeue the spec if we cannot get at least a read lock so we
|
||||
# can check the status presumably established by another process
|
||||
# -- failed, installed, or uninstalled -- on the next pass.
|
||||
@@ -2372,8 +2384,7 @@ def run(self) -> bool:
|
||||
|
||||
# Stop the timer and save results
|
||||
self.timer.stop()
|
||||
with open(self.pkg.times_log_path, "w") as timelog:
|
||||
self.timer.write_json(timelog)
|
||||
_write_timer_json(self.pkg, self.timer, False)
|
||||
|
||||
print_install_test_log(self.pkg)
|
||||
_print_timer(pre=self.pre, pkg_id=self.pkg_id, timer=self.timer)
|
||||
@@ -2394,7 +2405,9 @@ def _install_source(self) -> None:
|
||||
src_target = os.path.join(pkg.spec.prefix, "share", pkg.name, "src")
|
||||
tty.debug("{0} Copying source to {1}".format(self.pre, src_target))
|
||||
|
||||
fs.install_tree(pkg.stage.source_path, src_target)
|
||||
fs.install_tree(
|
||||
pkg.stage.source_path, src_target, allow_broken_symlinks=(sys.platform != "win32")
|
||||
)
|
||||
|
||||
def _real_install(self) -> None:
|
||||
import spack.builder
|
||||
|
||||
@@ -51,7 +51,7 @@
|
||||
stat_names = pstats.Stats.sort_arg_dict_default
|
||||
|
||||
#: top-level aliases for Spack commands
|
||||
aliases = {"rm": "remove"}
|
||||
aliases = {"concretise": "concretize", "containerise": "containerize", "rm": "remove"}
|
||||
|
||||
#: help levels in order of detail (i.e., number of commands shown)
|
||||
levels = ["short", "long"]
|
||||
|
||||
@@ -178,7 +178,7 @@ def merge_config_rules(configuration, spec):
|
||||
if spec.satisfies(constraint):
|
||||
if hasattr(constraint, "override") and constraint.override:
|
||||
spec_configuration = {}
|
||||
update_dictionary_extending_lists(spec_configuration, action)
|
||||
update_dictionary_extending_lists(spec_configuration, copy.deepcopy(action))
|
||||
|
||||
# Transform keywords for dependencies or prerequisites into a list of spec
|
||||
|
||||
|
||||
@@ -96,6 +96,7 @@
|
||||
on_package_attributes,
|
||||
)
|
||||
from spack.spec import InvalidSpecDetected, Spec
|
||||
from spack.util.cpus import determine_number_of_jobs
|
||||
from spack.util.executable import *
|
||||
from spack.variant import (
|
||||
any_combination_of,
|
||||
|
||||
@@ -180,6 +180,8 @@ class DetectablePackageMeta:
|
||||
for the detection function.
|
||||
"""
|
||||
|
||||
TAG = "detectable"
|
||||
|
||||
def __init__(cls, name, bases, attr_dict):
|
||||
if hasattr(cls, "executables") and hasattr(cls, "libraries"):
|
||||
msg = "a package can have either an 'executables' or 'libraries' attribute"
|
||||
@@ -195,6 +197,11 @@ def __init__(cls, name, bases, attr_dict):
|
||||
# If a package has the executables or libraries attribute then it's
|
||||
# assumed to be detectable
|
||||
if hasattr(cls, "executables") or hasattr(cls, "libraries"):
|
||||
# Append a tag to each detectable package, so that finding them is faster
|
||||
if hasattr(cls, "tags"):
|
||||
getattr(cls, "tags").append(DetectablePackageMeta.TAG)
|
||||
else:
|
||||
setattr(cls, "tags", [DetectablePackageMeta.TAG])
|
||||
|
||||
@classmethod
|
||||
def platform_executables(cls):
|
||||
|
||||
@@ -288,9 +288,6 @@ def next_spec(
|
||||
)
|
||||
raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
|
||||
|
||||
if root_spec.concrete:
|
||||
raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))
|
||||
|
||||
root_spec._add_dependency(dependency, deptypes=(), virtuals=())
|
||||
|
||||
else:
|
||||
@@ -306,13 +303,12 @@ def all_specs(self) -> List[spack.spec.Spec]:
|
||||
class SpecNodeParser:
|
||||
"""Parse a single spec node from a stream of tokens"""
|
||||
|
||||
__slots__ = "ctx", "has_compiler", "has_version", "has_hash"
|
||||
__slots__ = "ctx", "has_compiler", "has_version"
|
||||
|
||||
def __init__(self, ctx):
|
||||
self.ctx = ctx
|
||||
self.has_compiler = False
|
||||
self.has_version = False
|
||||
self.has_hash = False
|
||||
|
||||
def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spack.spec.Spec]:
|
||||
"""Parse a single spec node from a stream of tokens
|
||||
@@ -343,7 +339,6 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
|
||||
|
||||
while True:
|
||||
if self.ctx.accept(TokenType.COMPILER):
|
||||
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
|
||||
if self.has_compiler:
|
||||
raise spack.spec.DuplicateCompilerSpecError(
|
||||
f"{initial_spec} cannot have multiple compilers"
|
||||
@@ -353,7 +348,6 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
|
||||
initial_spec.compiler = spack.spec.CompilerSpec(compiler_name.strip(), ":")
|
||||
self.has_compiler = True
|
||||
elif self.ctx.accept(TokenType.COMPILER_AND_VERSION):
|
||||
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
|
||||
if self.has_compiler:
|
||||
raise spack.spec.DuplicateCompilerSpecError(
|
||||
f"{initial_spec} cannot have multiple compilers"
|
||||
@@ -367,7 +361,6 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
|
||||
elif self.ctx.accept(TokenType.VERSION) or self.ctx.accept(
|
||||
TokenType.VERSION_HASH_PAIR
|
||||
):
|
||||
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
|
||||
if self.has_version:
|
||||
raise spack.spec.MultipleVersionError(
|
||||
f"{initial_spec} cannot have multiple versions"
|
||||
@@ -378,25 +371,21 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
|
||||
initial_spec.attach_git_version_lookup()
|
||||
self.has_version = True
|
||||
elif self.ctx.accept(TokenType.BOOL_VARIANT):
|
||||
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
|
||||
variant_value = self.ctx.current_token.value[0] == "+"
|
||||
initial_spec._add_flag(
|
||||
self.ctx.current_token.value[1:].strip(), variant_value, propagate=False
|
||||
)
|
||||
elif self.ctx.accept(TokenType.PROPAGATED_BOOL_VARIANT):
|
||||
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
|
||||
variant_value = self.ctx.current_token.value[0:2] == "++"
|
||||
initial_spec._add_flag(
|
||||
self.ctx.current_token.value[2:].strip(), variant_value, propagate=True
|
||||
)
|
||||
elif self.ctx.accept(TokenType.KEY_VALUE_PAIR):
|
||||
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
|
||||
name, value = self.ctx.current_token.value.split("=", maxsplit=1)
|
||||
name = name.strip("'\" ")
|
||||
value = value.strip("'\" ")
|
||||
initial_spec._add_flag(name, value, propagate=False)
|
||||
elif self.ctx.accept(TokenType.PROPAGATED_KEY_VALUE_PAIR):
|
||||
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
|
||||
name, value = self.ctx.current_token.value.split("==", maxsplit=1)
|
||||
name = name.strip("'\" ")
|
||||
value = value.strip("'\" ")
|
||||
@@ -411,12 +400,6 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
|
||||
|
||||
return initial_spec
|
||||
|
||||
def hash_not_parsed_or_raise(self, spec, addition):
|
||||
if not self.has_hash:
|
||||
return
|
||||
|
||||
raise spack.spec.RedundantSpecError(spec, addition)
|
||||
|
||||
|
||||
class FileParser:
|
||||
"""Parse a single spec from a JSON or YAML file"""
|
||||
|
||||
@@ -139,6 +139,8 @@ def craype_type_and_version(cls):
|
||||
# If no default version, sort available versions and return latest
|
||||
versions_available = [spack.version.Version(v) for v in os.listdir(craype_dir)]
|
||||
versions_available.sort(reverse=True)
|
||||
if not versions_available:
|
||||
return (craype_type, None)
|
||||
return (craype_type, versions_available[0])
|
||||
|
||||
@classmethod
|
||||
|
||||
@@ -387,7 +387,7 @@ def _create_new_cache(self) -> Dict[str, os.stat_result]:
|
||||
|
||||
# Warn about invalid names that look like packages.
|
||||
if not nm.valid_module_name(pkg_name):
|
||||
if not pkg_name.startswith("."):
|
||||
if not pkg_name.startswith(".") and pkg_name != "repo.yaml":
|
||||
tty.warn(
|
||||
'Skipping package at {0}. "{1}" is not '
|
||||
"a valid Spack module name.".format(pkg_dir, pkg_name)
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
import re
|
||||
import types
|
||||
import warnings
|
||||
from typing import List, NamedTuple
|
||||
from typing import List, NamedTuple, Tuple, Union
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
@@ -44,15 +44,18 @@
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.store
|
||||
import spack.traverse
|
||||
import spack.util.crypto
|
||||
import spack.util.path
|
||||
import spack.util.timer
|
||||
import spack.variant
|
||||
import spack.version as vn
|
||||
import spack.version.git_ref_lookup
|
||||
from spack import traverse
|
||||
|
||||
from .counter import FullDuplicatesCounter, MinimalDuplicatesCounter, NoDuplicatesCounter
|
||||
|
||||
GitOrStandardVersion = Union[spack.version.GitVersion, spack.version.StandardVersion]
|
||||
|
||||
# these are from clingo.ast and bootstrapped later
|
||||
ASTType = None
|
||||
parse_files = None
|
||||
@@ -267,12 +270,14 @@ def _id(thing):
|
||||
|
||||
@llnl.util.lang.key_ordering
|
||||
class AspFunction(AspObject):
|
||||
__slots__ = ["name", "args"]
|
||||
|
||||
def __init__(self, name, args=None):
|
||||
self.name = name
|
||||
self.args = () if args is None else tuple(args)
|
||||
|
||||
def _cmp_key(self):
|
||||
return (self.name, self.args)
|
||||
return self.name, self.args
|
||||
|
||||
def __call__(self, *args):
|
||||
"""Return a new instance of this function with added arguments.
|
||||
@@ -567,6 +572,41 @@ def keyfn(x):
|
||||
return normalized_yaml
|
||||
|
||||
|
||||
def _is_checksummed_git_version(v):
|
||||
return isinstance(v, vn.GitVersion) and v.is_commit
|
||||
|
||||
|
||||
def _is_checksummed_version(version_info: Tuple[GitOrStandardVersion, dict]):
|
||||
"""Returns true iff the version is not a moving target"""
|
||||
version, info = version_info
|
||||
if isinstance(version, spack.version.StandardVersion):
|
||||
if any(h in info for h in spack.util.crypto.hashes.keys()) or "checksum" in info:
|
||||
return True
|
||||
return "commit" in info and len(info["commit"]) == 40
|
||||
return _is_checksummed_git_version(version)
|
||||
|
||||
|
||||
def _concretization_version_order(version_info: Tuple[GitOrStandardVersion, dict]):
|
||||
"""Version order key for concretization, where preferred > not preferred,
|
||||
not deprecated > deprecated, finite > any infinite component; only if all are
|
||||
the same, do we use default version ordering."""
|
||||
version, info = version_info
|
||||
return (
|
||||
info.get("preferred", False),
|
||||
not info.get("deprecated", False),
|
||||
not version.isdevelop(),
|
||||
version,
|
||||
)
|
||||
|
||||
|
||||
def _spec_with_default_name(spec_str, name):
|
||||
"""Return a spec with a default name if none is provided, used for requirement specs"""
|
||||
spec = spack.spec.Spec(spec_str)
|
||||
if not spec.name:
|
||||
spec.name = name
|
||||
return spec
|
||||
|
||||
|
||||
def bootstrap_clingo():
|
||||
global clingo, ASTType, parse_files
|
||||
|
||||
@@ -731,7 +771,9 @@ def fact(self, head):
|
||||
"""
|
||||
symbol = head.symbol() if hasattr(head, "symbol") else head
|
||||
|
||||
self.out.write("%s.\n" % str(symbol))
|
||||
# This is commented out to avoid evaluating str(symbol) when we have no stream
|
||||
if not isinstance(self.out, llnl.util.lang.Devnull):
|
||||
self.out.write(f"{str(symbol)}.\n")
|
||||
|
||||
atom = self.backend.add_atom(symbol)
|
||||
|
||||
@@ -804,6 +846,8 @@ def visit(node):
|
||||
# Load the file itself
|
||||
self.control.load(os.path.join(parent_dir, "concretize.lp"))
|
||||
self.control.load(os.path.join(parent_dir, "heuristic.lp"))
|
||||
if spack.config.CONFIG.get("concretizer:duplicates:strategy", "none") != "none":
|
||||
self.control.load(os.path.join(parent_dir, "heuristic_separate.lp"))
|
||||
self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
|
||||
self.control.load(os.path.join(parent_dir, "display.lp"))
|
||||
if not setup.concretize_everything:
|
||||
@@ -1363,26 +1407,29 @@ def condition(self, required_spec, imposed_spec=None, name=None, msg=None, node=
|
||||
self.gen.fact(fn.condition_reason(condition_id, msg))
|
||||
|
||||
cache = self._trigger_cache[named_cond.name]
|
||||
if named_cond not in cache:
|
||||
|
||||
named_cond_key = str(named_cond)
|
||||
if named_cond_key not in cache:
|
||||
trigger_id = next(self._trigger_id_counter)
|
||||
requirements = self.spec_clauses(named_cond, body=True, required_from=name)
|
||||
cache[named_cond] = (trigger_id, requirements)
|
||||
trigger_id, requirements = cache[named_cond]
|
||||
cache[named_cond_key] = (trigger_id, requirements)
|
||||
trigger_id, requirements = cache[named_cond_key]
|
||||
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_trigger(condition_id, trigger_id)))
|
||||
|
||||
if not imposed_spec:
|
||||
return condition_id
|
||||
|
||||
cache = self._effect_cache[named_cond.name]
|
||||
if imposed_spec not in cache:
|
||||
imposed_spec_key = str(imposed_spec)
|
||||
if imposed_spec_key not in cache:
|
||||
effect_id = next(self._effect_id_counter)
|
||||
requirements = self.spec_clauses(imposed_spec, body=False, required_from=name)
|
||||
if not node:
|
||||
requirements = list(
|
||||
filter(lambda x: x.args[0] not in ("node", "virtual_node"), requirements)
|
||||
)
|
||||
cache[imposed_spec] = (effect_id, requirements)
|
||||
effect_id, requirements = cache[imposed_spec]
|
||||
cache[imposed_spec_key] = (effect_id, requirements)
|
||||
effect_id, requirements = cache[imposed_spec_key]
|
||||
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_effect(condition_id, effect_id)))
|
||||
return condition_id
|
||||
|
||||
@@ -1820,37 +1867,11 @@ class Body:
                    # skip build dependencies of already-installed specs
                    if concrete_build_deps or dtype != "build":
                        clauses.append(fn.attr("depends_on", spec.name, dep.name, dtype))

                        # TODO: We have to look up info from package.py here, but we'd
                        # TODO: like to avoid this entirely. We should not need to look
                        # TODO: up potentially wrong info if we have virtual edge info.
                        try:
                            try:
                                pkg = dep.package

                            except spack.repo.UnknownNamespaceError:
                                # Try to look up the package of the same name and use its
                                # providers. This is as good as we can do without edge info.
                                pkg_class = spack.repo.PATH.get_pkg_class(dep.name)
                                spec = spack.spec.Spec(f"{dep.name}@{dep.version}")
                                pkg = pkg_class(spec)

                            virtuals = pkg.virtuals_provided

                        except spack.repo.UnknownPackageError:
                            # Skip virtual node constraints for renamed/deleted packages,
                            # so their binaries can still be installed.
                            # NOTE: with current specs (which lack edge attributes) this
                            # can allow concretizations with two providers, but it's unlikely.
                            continue

                        # Don't concretize with two providers of the same virtual.
                        # See above for exception for unknown packages.
                        # TODO: we will eventually record provider information on edges,
                        # TODO: which avoids the need for the package lookup above.
                        for virtual in virtuals:
                            clauses.append(fn.attr("virtual_node", virtual.name))
                            clauses.append(fn.provider(dep.name, virtual.name))
                        for virtual_name in dspec.virtuals:
                            clauses.append(
                                fn.attr("virtual_on_edge", spec.name, dep.name, virtual_name)
                            )
                            clauses.append(fn.attr("virtual_node", virtual_name))

                    # imposing hash constraints for all but pure build deps of
                    # already-installed concrete specs.
@@ -1872,30 +1893,27 @@ class Body:

        return clauses

    def build_version_dict(self, possible_pkgs):
    def define_package_versions_and_validate_preferences(
        self, possible_pkgs, require_checksum: bool
    ):
        """Declare any versions in specs not declared in packages."""
        packages_yaml = spack.config.get("packages")
        packages_yaml = _normalize_packages_yaml(packages_yaml)
        for pkg_name in possible_pkgs:
            pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

            # All the versions from the corresponding package.py file. Since concepts
            # like being a "develop" version or being preferred exist only at a
            # package.py level, sort them in this partial list here
            def key_fn(item):
                version, info = item
                # When COMPARING VERSIONS, the '@develop' version is always
                # larger than other versions. BUT when CONCRETIZING, the largest
                # NON-develop version is selected by default.
                return (
                    info.get("preferred", False),
                    not info.get("deprecated", False),
                    not version.isdevelop(),
                    version,
                )
            package_py_versions = sorted(
                pkg_cls.versions.items(), key=_concretization_version_order, reverse=True
            )

            for idx, item in enumerate(sorted(pkg_cls.versions.items(), key=key_fn, reverse=True)):
                v, version_info = item
            if require_checksum and pkg_cls.has_code:
                package_py_versions = [
                    x for x in package_py_versions if _is_checksummed_version(x)
                ]

            for idx, (v, version_info) in enumerate(package_py_versions):
                self.possible_versions[pkg_name].add(v)
                self.declared_versions[pkg_name].append(
                    DeclaredVersion(version=v, idx=idx, origin=Provenance.PACKAGE_PY)
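The module-level _concretization_version_order used above sorts by (preferred, not deprecated, not develop, version); a toy stand-in (plain strings and dicts, not Spack's version types) showing the resulting order:

    versions = [
        ("develop", {}),
        ("2.1", {"preferred": True}),
        ("2.2", {}),
        ("1.9", {"deprecated": True}),
    ]

    def order_key(item):
        v, info = item
        return (info.get("preferred", False), not info.get("deprecated", False), v != "develop", v)

    # preferred first, then the newest non-develop, then develop, then deprecated
    assert [v for v, _ in sorted(versions, key=order_key, reverse=True)] == ["2.1", "2.2", "develop", "1.9"]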
@@ -1904,22 +1922,26 @@ def key_fn(item):
                if deprecated:
                    self.deprecated_versions[pkg_name].add(v)

            # All the preferred versions from packages.yaml; versions in external
            # specs will be computed later
            version_preferences = packages_yaml.get(pkg_name, {}).get("version", [])
            if pkg_name not in packages_yaml or "version" not in packages_yaml[pkg_name]:
                continue

            version_defs = []
            pkg_class = spack.repo.PATH.get_pkg_class(pkg_name)
            for vstr in version_preferences:

            for vstr in packages_yaml[pkg_name]["version"]:
                v = vn.ver(vstr)

                if isinstance(v, vn.GitVersion):
                    version_defs.append(v)
                    if not require_checksum or v.is_commit:
                        version_defs.append(v)
                else:
                    satisfying_versions = self._check_for_defined_matching_versions(pkg_class, v)
                    # Amongst all defined versions satisfying this specific
                    # preference, the highest-numbered version is the
                    # most-preferred: therefore sort satisfying versions
                    # from greatest to least
                    version_defs.extend(sorted(satisfying_versions, reverse=True))
                    matches = [x for x in self.possible_versions[pkg_name] if x.satisfies(v)]
                    matches.sort(reverse=True)
                    if not matches:
                        raise spack.config.ConfigError(
                            f"Preference for version {v} does not match any known "
                            f"version of {pkg_name} (in its package.py or any external)"
                        )
                    version_defs.extend(matches)

            for weight, vdef in enumerate(llnl.util.lang.dedupe(version_defs)):
                self.declared_versions[pkg_name].append(
@@ -1927,31 +1949,9 @@ def key_fn(item):
            )
            self.possible_versions[pkg_name].add(vdef)

    def _check_for_defined_matching_versions(self, pkg_class, v):
        """Given a version specification (which may be a concrete version,
        range, etc.), determine if any package.py version declarations
        or externals define a version which satisfies it.

        This is primarily for determining whether a version request (e.g.
        version preferences, which should not themselves define versions)
        refers to a defined version.

        This function raises an exception if no satisfying versions are
        found.
        """
        pkg_name = pkg_class.name
        satisfying_versions = list(x for x in pkg_class.versions if x.satisfies(v))
        satisfying_versions.extend(x for x in self.possible_versions[pkg_name] if x.satisfies(v))
        if not satisfying_versions:
            raise spack.config.ConfigError(
                "Preference for version {0} does not match any version"
                " defined for {1} (in its package.py or any external)".format(str(v), pkg_name)
            )
        return satisfying_versions

    def add_concrete_versions_from_specs(self, specs, origin):
    def define_ad_hoc_versions_from_specs(self, specs, origin, require_checksum: bool):
        """Add concrete versions to possible versions from lists of CLI/dev specs."""
        for s in spack.traverse.traverse_nodes(specs):
        for s in traverse.traverse_nodes(specs):
            # If there is a concrete version on the CLI *that we know nothing
            # about*, add it to the known versions. Use idx=0, which is the
            # best possible, so they're guaranteed to be used preferentially.
@@ -1960,9 +1960,13 @@ def add_concrete_versions_from_specs(self, specs, origin):
            if version is None or any(v == version for v in self.possible_versions[s.name]):
                continue

            self.declared_versions[s.name].append(
                DeclaredVersion(version=version, idx=0, origin=origin)
            )
            if require_checksum and not _is_checksummed_git_version(version):
                raise UnsatisfiableSpecError(
                    s.format("No matching version for constraint {name}{@versions}")
                )

            declared = DeclaredVersion(version=version, idx=0, origin=origin)
            self.declared_versions[s.name].append(declared)
            self.possible_versions[s.name].add(version)

    def _supported_targets(self, compiler_name, compiler_version, targets):
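_is_checksummed_git_version (referenced above) accepts git versions that pin their sources exactly; a toy stand-in of that idea, not Spack's implementation:

    import re

    def is_checksummed_git_version(v: str) -> bool:
        # assumption: a full 40-hex-digit commit sha identifies the sources exactly
        return bool(re.fullmatch(r"[0-9a-f]{40}", v))

    assert is_checksummed_git_version("a" * 40)
    assert not is_checksummed_git_version("main")  # a moving ref is not checksummed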
@@ -2159,7 +2163,7 @@ def generate_possible_compilers(self, specs):
        # add compiler specs from the input line to possibilities if we
        # don't require compilers to exist.
        strict = spack.concretize.Concretizer().check_for_compiler_existence
        for s in spack.traverse.traverse_nodes(specs):
        for s in traverse.traverse_nodes(specs):
            # we don't need to validate compilers for already-built specs
            if s.concrete or not s.compiler:
                continue
@@ -2409,13 +2413,12 @@ def setup(self, driver, specs, reuse=None):
        self.provider_requirements()
        self.external_packages()

        # traverse all specs and packages to build dict of possible versions
        self.build_version_dict(self.pkgs)
        self.add_concrete_versions_from_specs(specs, Provenance.SPEC)
        self.add_concrete_versions_from_specs(dev_specs, Provenance.DEV_SPEC)

        req_version_specs = self._get_versioned_specs_from_pkg_requirements()
        self.add_concrete_versions_from_specs(req_version_specs, Provenance.PACKAGE_REQUIREMENT)
        # TODO: make a config option for this undocumented feature
        require_checksum = "SPACK_CONCRETIZER_REQUIRE_CHECKSUM" in os.environ
        self.define_package_versions_and_validate_preferences(self.pkgs, require_checksum)
        self.define_ad_hoc_versions_from_specs(specs, Provenance.SPEC, require_checksum)
        self.define_ad_hoc_versions_from_specs(dev_specs, Provenance.DEV_SPEC, require_checksum)
        self.validate_and_define_versions_from_requirements(require_checksum)

        self.gen.h1("Package Constraints")
        for pkg in sorted(self.pkgs):
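A quick sketch of the undocumented switch mentioned in the TODO above; as written, only the presence of the variable matters, not its value:

    import os

    os.environ["SPACK_CONCRETIZER_REQUIRE_CHECKSUM"] = ""  # any value, even empty, enables it
    require_checksum = "SPACK_CONCRETIZER_REQUIRE_CHECKSUM" in os.environ
    assert require_checksum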
@@ -2464,78 +2467,68 @@ def literal_specs(self, specs):
            if self.concretize_everything:
                self.gen.fact(fn.solve_literal(idx))

    def _get_versioned_specs_from_pkg_requirements(self):
        """If package requirements mention versions that are not mentioned
    def validate_and_define_versions_from_requirements(self, require_checksum: bool):
        """If package requirements mention concrete versions that are not mentioned
        elsewhere, then we need to collect those to mark them as possible
        versions.
        """
        req_version_specs = list()
        config = spack.config.get("packages")
        for pkg_name, d in config.items():
            if pkg_name == "all":
        versions. If they are abstract and statically have no match, then we
        need to throw an error. This function assumes all possible versions are already
        registered in self.possible_versions."""
        for pkg_name, d in spack.config.get("packages").items():
            if pkg_name == "all" or "require" not in d:
                continue
            if "require" in d:
                req_version_specs.extend(self._specs_from_requires(pkg_name, d["require"]))
        return req_version_specs

            for s in traverse.traverse_nodes(self._specs_from_requires(pkg_name, d["require"])):
                name, versions = s.name, s.versions

                if name not in self.pkgs or versions == spack.version.any_version:
                    continue

                s.attach_git_version_lookup()
                v = versions.concrete

                if not v:
                    # If the version is not concrete, check that it is statically
                    # concretizable. If not, throw an error, which is just so that users
                    # know they need to change their config instead of getting a
                    # hard-to-decipher concretization error.
                    if not any(x for x in self.possible_versions[name] if x.satisfies(versions)):
                        raise spack.config.ConfigError(
                            f"Version requirement {versions} on {pkg_name} for {name} "
                            f"cannot match any known version from package.py or externals"
                        )
                    continue

                if v in self.possible_versions[name]:
                    continue

                # If concrete and not yet defined, conditionally define it, like we do
                # for specs from the command line.
                if not require_checksum or _is_checksummed_git_version(v):
                    self.declared_versions[name].append(
                        DeclaredVersion(version=v, idx=0, origin=Provenance.PACKAGE_REQUIREMENT)
                    )
                    self.possible_versions[name].add(v)

def _specs_from_requires(self, pkg_name, section):
|
||||
"""Collect specs from requirements which define versions (i.e. those that
|
||||
have a concrete version). Requirements can define *new* versions if
|
||||
they are included as part of an equivalence (hash=number) but not
|
||||
otherwise.
|
||||
"""
|
||||
"""Collect specs from a requirement rule"""
|
||||
if isinstance(section, str):
|
||||
spec = spack.spec.Spec(section)
|
||||
if not spec.name:
|
||||
spec.name = pkg_name
|
||||
extracted_specs = [spec]
|
||||
else:
|
||||
spec_strs = []
|
||||
for spec_group in section:
|
||||
if isinstance(spec_group, str):
|
||||
spec_strs.append(spec_group)
|
||||
else:
|
||||
# Otherwise it is an object. The object can contain a single
|
||||
# "spec" constraint, or a list of them with "any_of" or
|
||||
# "one_of" policy.
|
||||
if "spec" in spec_group:
|
||||
new_constraints = [spec_group["spec"]]
|
||||
else:
|
||||
key = "one_of" if "one_of" in spec_group else "any_of"
|
||||
new_constraints = spec_group[key]
|
||||
spec_strs.extend(new_constraints)
|
||||
yield _spec_with_default_name(section, pkg_name)
|
||||
return
|
||||
|
||||
extracted_specs = []
|
||||
for spec_str in spec_strs:
|
||||
spec = spack.spec.Spec(spec_str)
|
||||
if not spec.name:
|
||||
spec.name = pkg_name
|
||||
extracted_specs.append(spec)
|
||||
|
||||
version_specs = []
|
||||
for spec in extracted_specs:
|
||||
if spec.versions.concrete:
|
||||
# Note: this includes git versions
|
||||
version_specs.append(spec)
|
||||
for spec_group in section:
|
||||
if isinstance(spec_group, str):
|
||||
yield _spec_with_default_name(spec_group, pkg_name)
|
||||
continue
|
||||
|
||||
# Prefer spec's name if it exists, in case the spec is
|
||||
# requiring a specific implementation inside of a virtual section
|
||||
# e.g. packages:mpi:require:openmpi@4.0.1
|
||||
pkg_class = spack.repo.PATH.get_pkg_class(spec.name or pkg_name)
|
||||
satisfying_versions = self._check_for_defined_matching_versions(
|
||||
pkg_class, spec.versions
|
||||
)
|
||||
# Otherwise it is an object. The object can contain a single
|
||||
# "spec" constraint, or a list of them with "any_of" or
|
||||
# "one_of" policy.
|
||||
if "spec" in spec_group:
|
||||
yield _spec_with_default_name(spec_group["spec"], pkg_name)
|
||||
continue
|
||||
|
||||
# Version ranges ("@1.3" without the "=", "@1.2:1.4") and lists
|
||||
# will end up here
|
||||
ordered_satisfying_versions = sorted(satisfying_versions, reverse=True)
|
||||
vspecs = list(spack.spec.Spec("@{0}".format(x)) for x in ordered_satisfying_versions)
|
||||
version_specs.extend(vspecs)
|
||||
|
||||
for spec in version_specs:
|
||||
spec.attach_git_version_lookup()
|
||||
return version_specs
|
||||
key = "one_of" if "one_of" in spec_group else "any_of"
|
||||
for s in spec_group[key]:
|
||||
yield _spec_with_default_name(s, pkg_name)
|
||||
|
||||
|
||||
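The rewritten generator above accepts three requirement shapes; the data below (hypothetical package names) shows each of them as plain Python:

    section_str = "openmpi@4.0.1"                 # bare string rule
    section_list = [
        "mpich@4.1",                              # string entry in a list
        {"spec": "hdf5+mpi"},                     # single "spec" constraint
        {"any_of": ["zlib@1.2.13", "zlib-ng"]},   # "one_of"/"any_of" policy with a list
    ]
    # In each case an anonymous spec such as "@4.0.1" under packages:openmpi:require
    # has its name defaulted to "openmpi" by _spec_with_default_name.
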
class SpecBuilder:
@@ -2953,6 +2946,7 @@ def solve(self, specs, out=None, timers=False, stats=False, tests=False, setup_o
            setup_only (bool): if True, stop after setup and don't solve (default False).
        """
        # Check upfront that the variants are admissible
        specs = [s.lookup_hash() for s in specs]
        reusable_specs = self._check_input_and_extract_concrete_specs(specs)
        reusable_specs.extend(self._reusable_specs(specs))
        setup = SpackSolverSetup(tests=tests)
@@ -2976,6 +2970,7 @@ def solve_in_rounds(self, specs, out=None, timers=False, stats=False, tests=Fals
            stats (bool): print internal statistics if set to True
            tests (bool): add test dependencies to the solve
        """
        specs = [s.lookup_hash() for s in specs]
        reusable_specs = self._check_input_and_extract_concrete_specs(specs)
        reusable_specs.extend(self._reusable_specs(specs))
        setup = SpackSolverSetup(tests=tests)

@@ -113,6 +113,7 @@ unification_set(SetID, VirtualNode)
% Node attributes that have multiple node arguments (usually, only the first argument is a node)
multiple_nodes_attribute("node_flag_source").
multiple_nodes_attribute("depends_on").
multiple_nodes_attribute("virtual_on_edge").

% Map constraint on the literal ID to facts on the node
attr(Name, node(min_dupe_id, A1)) :- literal(LiteralID, Name, A1), solve_literal(LiteralID).
@@ -124,6 +125,9 @@ attr(Name, node(min_dupe_id, A1), A2, A3, A4) :- literal(LiteralID, Name, A1, A2
attr("node_flag_source", node(min_dupe_id, A1), A2, node(min_dupe_id, A3)) :- literal(LiteralID, "node_flag_source", A1, A2, A3), solve_literal(LiteralID).
attr("depends_on", node(min_dupe_id, A1), node(min_dupe_id, A2), A3) :- literal(LiteralID, "depends_on", A1, A2, A3), solve_literal(LiteralID).

% Discriminate between "roots" that have been explicitly requested, and roots that are deduced from "virtual roots"
explicitly_requested_root(node(min_dupe_id, A1)) :- literal(LiteralID, "root", A1), solve_literal(LiteralID).

#defined concretize_everything/0.
#defined literal/1.
#defined literal/3.
@@ -144,10 +148,10 @@ error(100, no_value_error, Attribute, Package)
    not attr(Attribute, node(ID, Package), _).

% Error when multiple attr need to be selected
error(100, multiple_values_error, Attribute, PackageNode)
  :- attr("node", PackageNode),
error(100, multiple_values_error, Attribute, Package)
  :- attr("node", node(ID, Package)),
     attr_single_value(Attribute),
     2 { attr(Attribute, PackageNode, Value) }.
     2 { attr(Attribute, node(ID, Package), Value) }.

%-----------------------------------------------------------------------------
% Version semantics
@@ -364,6 +368,18 @@ attr("node_flag_source", node(X, A1), A2, node(Y, A3))
  :- impose(ID, node(X, A1)),
     imposed_constraint(ID, "depends_on", A1, A2, A3).

% Reconstruct virtual dependencies for reused specs
attr("virtual_on_edge", node(X, A1), node(Y, A2), Virtual)
  :- impose(ID, node(X, A1)),
     depends_on(node(X, A1), node(Y, A2)),
     imposed_constraint(ID, "virtual_on_edge", A1, A2, Virtual),
     not build(node(X, A1)).

virtual_condition_holds(node(Y, A2), Virtual)
  :- impose(ID, node(X, A1)),
     attr("virtual_on_edge", node(X, A1), node(Y, A2), Virtual),
     not build(node(X, A1)).

% we cannot have additional variant values when we are working with concrete specs
:- attr("node", node(ID, Package)),
   attr("hash", node(ID, Package), Hash),
@@ -478,13 +494,13 @@ attr("virtual_on_edge", PackageNode, ProviderNode, Virtual)
% If there's a virtual node, we must select one and only one provider.
% The provider must be selected among the possible providers.

error(100, "Cannot find valid provider for virtual {0}", VirtualNode)
  :- attr("virtual_node", VirtualNode),
     not provider(_, VirtualNode).
error(100, "Cannot find valid provider for virtual {0}", Virtual)
  :- attr("virtual_node", node(X, Virtual)),
     not provider(_, node(X, Virtual)).

error(100, "Cannot select a single provider for virtual '{0}'", VirtualNode)
  :- attr("virtual_node", VirtualNode),
     2 { provider(P, VirtualNode) }.
error(100, "Cannot select a single provider for virtual '{0}'", Virtual)
  :- attr("virtual_node", node(X, Virtual)),
     2 { provider(P, node(X, Virtual)) }.

% virtual roots imply virtual nodes, and that one provider is a root
attr("virtual_node", VirtualNode) :- attr("virtual_root", VirtualNode).
@@ -519,6 +535,19 @@ virtual_condition_holds(ID, node(ProviderID, Provider), Virtual) :-
    condition_holds(ID, node(ProviderID, Provider)),
    virtual(Virtual).

% If a "provider" condition holds, but this package is not a provider, do not impose the "provider" condition
do_not_impose(EffectID, node(X, Package))
  :- virtual_condition_holds(ID, node(X, Package), Virtual),
     pkg_fact(Package, condition_effect(ID, EffectID)),
     not provider(node(X, Package), node(_, Virtual)).

% Choose the provider among root specs, if possible
:- provider(ProviderNode, node(min_dupe_id, Virtual)),
   virtual_condition_holds(_, PossibleProvider, Virtual),
   PossibleProvider != ProviderNode,
   explicitly_requested_root(PossibleProvider),
   not explicitly_requested_root(ProviderNode).

% A package cannot be the actual provider for a virtual if it does not
% fulfill the conditions to provide that virtual
:- provider(PackageNode, node(VirtualID, Virtual)),
@@ -727,23 +756,23 @@ attr("variant_value", node(ID, Package), Variant, Value) :-
    attr("variant_propagate", node(ID, Package), Variant, Value, _),
    pkg_fact(Package, variant_possible_value(Variant, Value)).

error(100, "{0} and {1} cannot both propagate variant '{2}' to package {3} with values '{4}' and '{5}'", Source1, Source2, Variant, PackageNode, Value1, Value2) :-
    attr("variant_propagate", PackageNode, Variant, Value1, Source1),
    attr("variant_propagate", PackageNode, Variant, Value2, Source2),
    node_has_variant(PackageNode, Variant),
error(100, "{0} and {1} cannot both propagate variant '{2}' to package {3} with values '{4}' and '{5}'", Source1, Source2, Variant, Package, Value1, Value2) :-
    attr("variant_propagate", node(X, Package), Variant, Value1, Source1),
    attr("variant_propagate", node(X, Package), Variant, Value2, Source2),
    node_has_variant(node(X, Package), Variant),
    Value1 < Value2.

% a variant cannot be set if it is not a variant on the package
error(100, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Variant, PackageNode)
  :- attr("variant_set", PackageNode, Variant),
     not node_has_variant(PackageNode, Variant),
     build(PackageNode).
error(100, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Variant, Package)
  :- attr("variant_set", node(X, Package), Variant),
     not node_has_variant(node(X, Package), Variant),
     build(node(X, Package)).

% a variant cannot take on a value if it is not a variant of the package
error(100, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Variant, PackageNode)
  :- attr("variant_value", PackageNode, Variant, _),
     not node_has_variant(PackageNode, Variant),
     build(PackageNode).
error(100, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Variant, Package)
  :- attr("variant_value", node(X, Package), Variant, _),
     not node_has_variant(node(X, Package), Variant),
     build(node(X, Package)).

% if a variant is sticky and not set its value is the default value
attr("variant_value", node(ID, Package), Variant, Value) :-
@@ -770,11 +799,11 @@ error(100, "'{0}' required multiple values for single-valued variant '{1}'", Pac
    build(node(ID, Package)),
    2 { attr("variant_value", node(ID, Package), Variant, Value) }.

error(100, "No valid value for variant '{1}' of package '{0}'", PackageNode, Variant)
  :- attr("node", PackageNode),
     node_has_variant(PackageNode, Variant),
     build(PackageNode),
     not attr("variant_value", PackageNode, Variant, _).
error(100, "No valid value for variant '{1}' of package '{0}'", Package, Variant)
  :- attr("node", node(X, Package)),
     node_has_variant(node(X, Package), Variant),
     build(node(X, Package)),
     not attr("variant_value", node(X, Package), Variant, _).

% if a variant is set to anything, it is considered 'set'.
attr("variant_set", PackageNode, Variant) :- attr("variant_set", PackageNode, Variant, _).
@@ -858,11 +887,11 @@ variant_default_value(Package, Variant, Value) :-

% Treat 'none' in a special way - it cannot be combined with other
% values even if the variant is multi-valued
error(100, "{0} variant '{1}' cannot have values '{2}' and 'none'", PackageNode, Variant, Value)
  :- attr("variant_value", PackageNode, Variant, Value),
     attr("variant_value", PackageNode, Variant, "none"),
error(100, "{0} variant '{1}' cannot have values '{2}' and 'none'", Package, Variant, Value)
  :- attr("variant_value", node(X, Package), Variant, Value),
     attr("variant_value", node(X, Package), Variant, "none"),
     Value != "none",
     build(PackageNode).
     build(node(X, Package)).

% patches and dev_path are special variants -- they don't have to be
% declared in the package, so we just allow them to spring into existence
@@ -911,18 +940,18 @@ os(OS) :- os(OS, _).
{ attr("node_os", PackageNode, OS) : os(OS) } :- attr("node", PackageNode).

% can't have a non-buildable OS on a node we need to build
error(100, "Cannot select '{0} os={1}' (operating system '{1}' is not buildable)", PackageNode, OS)
  :- build(PackageNode),
     attr("node_os", PackageNode, OS),
error(100, "Cannot select '{0} os={1}' (operating system '{1}' is not buildable)", Package, OS)
  :- build(node(X, Package)),
     attr("node_os", node(X, Package), OS),
     not buildable_os(OS).

% can't have dependencies on incompatible OS's
error(100, "{0} and dependency {1} have incompatible operating systems 'os={2}' and 'os={3}'", PackageNode, DependencyNode, PackageNodeOS, DependencyOS)
  :- depends_on(PackageNode, DependencyNode),
     attr("node_os", PackageNode, PackageNodeOS),
     attr("node_os", DependencyNode, DependencyOS),
error(100, "{0} and dependency {1} have incompatible operating systems 'os={2}' and 'os={3}'", Package, Dependency, PackageNodeOS, DependencyOS)
  :- depends_on(node(X, Package), node(Y, Dependency)),
     attr("node_os", node(X, Package), PackageNodeOS),
     attr("node_os", node(Y, Dependency), DependencyOS),
     not os_compatible(PackageNodeOS, DependencyOS),
     build(PackageNode).
     build(node(X, Package)).

% give OS choice weights according to os declarations
node_os_weight(PackageNode, Weight)
@@ -966,9 +995,9 @@ attr("node_os", PackageNode, OS) :- attr("node_os_set", PackageNode, OS), attr("
{ attr("node_target", PackageNode, Target) : target(Target) } :- attr("node", PackageNode).

% If a node must satisfy a target constraint, enforce it
error(10, "'{0} target={1}' cannot satisfy constraint 'target={2}'", PackageNode, Target, Constraint)
  :- attr("node_target", PackageNode, Target),
     attr("node_target_satisfies", PackageNode, Constraint),
error(10, "'{0} target={1}' cannot satisfy constraint 'target={2}'", Package, Target, Constraint)
  :- attr("node_target", node(X, Package), Target),
     attr("node_target_satisfies", node(X, Package), Constraint),
     not target_satisfies(Constraint, Target).

% If a node has a target and the target satisfies a constraint, then the target
@@ -977,10 +1006,10 @@ attr("node_target_satisfies", PackageNode, Constraint)
  :- attr("node_target", PackageNode, Target), target_satisfies(Constraint, Target).

% If a node has a target, all of its dependencies must be compatible with that target
error(100, "Cannot find compatible targets for {0} and {1}", PackageNode, DependencyNode)
  :- depends_on(PackageNode, DependencyNode),
     attr("node_target", PackageNode, Target),
     not node_target_compatible(DependencyNode, Target).
error(100, "Cannot find compatible targets for {0} and {1}", Package, Dependency)
  :- depends_on(node(X, Package), node(Y, Dependency)),
     attr("node_target", node(X, Package), Target),
     not node_target_compatible(node(Y, Dependency), Target).

% Intermediate step for performance reasons
% When the integrity constraint above was formulated including this logic
@@ -992,13 +1021,13 @@ node_target_compatible(PackageNode, Target)
#defined target_satisfies/2.

% can't use targets on node if the compiler for the node doesn't support them
error(100, "{0} compiler '{2}@{3}' incompatible with 'target={1}'", PackageNode, Target, Compiler, Version)
  :- attr("node_target", PackageNode, Target),
     node_compiler(PackageNode, CompilerID),
error(100, "{0} compiler '{2}@{3}' incompatible with 'target={1}'", Package, Target, Compiler, Version)
  :- attr("node_target", node(X, Package), Target),
     node_compiler(node(X, Package), CompilerID),
     not compiler_supports_target(CompilerID, Target),
     compiler_name(CompilerID, Compiler),
     compiler_version(CompilerID, Version),
     build(PackageNode).
     build(node(X, Package)).

% if a target is set explicitly, respect it
attr("node_target", PackageNode, Target)
@@ -1021,9 +1050,9 @@ node_target_mismatch(ParentNode, DependencyNode)
    not node_target_match(ParentNode, DependencyNode).

% disallow reusing concrete specs that don't have a compatible target
error(100, "'{0} target={1}' is not compatible with this machine", PackageNode, Target)
  :- attr("node", PackageNode),
     attr("node_target", PackageNode, Target),
error(100, "'{0} target={1}' is not compatible with this machine", Package, Target)
  :- attr("node", node(X, Package)),
     attr("node_target", node(X, Package), Target),
     not target(Target).

%-----------------------------------------------------------------------------
@@ -1052,33 +1081,33 @@ attr("node_compiler_version", PackageNode, CompilerName, CompilerVersion)
attr("node_compiler", PackageNode, CompilerName)
  :- attr("node_compiler_version", PackageNode, CompilerName, CompilerVersion).

error(100, "No valid compiler version found for '{0}'", PackageNode)
  :- attr("node", PackageNode),
     not node_compiler(PackageNode, _).
error(100, "No valid compiler version found for '{0}'", Package)
  :- attr("node", node(X, Package)),
     not node_compiler(node(X, Package), _).

% We can't have a compiler be enforced and select the version from another compiler
error(100, "Cannot select a single compiler for package {0}", PackageNode)
  :- attr("node", PackageNode),
     2 { attr("node_compiler_version", PackageNode, C, V) }.
error(100, "Cannot select a single compiler for package {0}", Package)
  :- attr("node", node(X, Package)),
     2 { attr("node_compiler_version", node(X, Package), C, V) }.

% If the compiler of a node cannot be satisfied, raise
error(10, "No valid compiler for {0} satisfies '%{1}'", PackageNode, Compiler)
  :- attr("node", PackageNode),
     attr("node_compiler_version_satisfies", PackageNode, Compiler, ":"),
error(10, "No valid compiler for {0} satisfies '%{1}'", Package, Compiler)
  :- attr("node", node(X, Package)),
     attr("node_compiler_version_satisfies", node(X, Package), Compiler, ":"),
     not compiler_version_satisfies(Compiler, ":", _).

% If the compiler of a node must satisfy a constraint, then its version
% must be chosen among the ones that satisfy said constraint
error(100, "No valid version for '{0}' compiler '{1}' satisfies '@{2}'", PackageNode, Compiler, Constraint)
  :- attr("node", PackageNode),
     attr("node_compiler_version_satisfies", PackageNode, Compiler, Constraint),
error(100, "No valid version for '{0}' compiler '{1}' satisfies '@{2}'", Package, Compiler, Constraint)
  :- attr("node", node(X, Package)),
     attr("node_compiler_version_satisfies", node(X, Package), Compiler, Constraint),
     not compiler_version_satisfies(Compiler, Constraint, _).

error(100, "No valid version for '{0}' compiler '{1}' satisfies '@{2}'", PackageNode, Compiler, Constraint)
  :- attr("node", PackageNode),
     attr("node_compiler_version_satisfies", PackageNode, Compiler, Constraint),
error(100, "No valid version for '{0}' compiler '{1}' satisfies '@{2}'", Package, Compiler, Constraint)
  :- attr("node", node(X, Package)),
     attr("node_compiler_version_satisfies", node(X, Package), Compiler, Constraint),
     not compiler_version_satisfies(Compiler, Constraint, ID),
     node_compiler(PackageNode, ID).
     node_compiler(node(X, Package), ID).

% If the node is associated with a compiler and the compiler satisfies a constraint, then
% the compiler associated with the node satisfies the same constraint
@@ -1100,14 +1129,14 @@ attr("node_compiler_version_satisfies", PackageNode, Compiler, Constraint)
% Cannot select a compiler if it is not supported on the OS
% Compilers that are explicitly marked as allowed
% are excluded from this check
error(100, "{0} compiler '%{1}@{2}' incompatible with 'os={3}'", PackageNode, Compiler, Version, OS)
  :- attr("node_os", PackageNode, OS),
     node_compiler(PackageNode, CompilerID),
error(100, "{0} compiler '%{1}@{2}' incompatible with 'os={3}'", Package, Compiler, Version, OS)
  :- attr("node_os", node(X, Package), OS),
     node_compiler(node(X, Package), CompilerID),
     compiler_name(CompilerID, Compiler),
     compiler_version(CompilerID, Version),
     not compiler_os(CompilerID, OS),
     not allow_compiler(Compiler, Version),
     build(PackageNode).
     build(node(X, Package)).

% If a package and one of its dependencies don't have the
% same compiler there's a mismatch.

@@ -4,7 +4,7 @@
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

%=============================================================================
% Heuristic to speed-up solves
% Heuristic to speed-up solves (node with ID 0)
%=============================================================================

@@ -27,18 +27,3 @@
% Providers
#heuristic attr("node", node(0, Package)) : default_provider_preference(Virtual, Package, 0), possible_in_link_run(Package). [30, true]

% node(ID, _)
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, ID)), attr("node", node(ID, Package)). [25-5*ID, true]
#heuristic version_weight(node(ID, Package), ID) : pkg_fact(Package, version_declared(Version, ID)), attr("node", node(ID, Package)). [25-5*ID, true]
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)). [25-5*ID, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, ID)), attr("node", node(ID, Package)). [25-5*ID, true]
#heuristic node_target_weight(node(ID, Package), ID) : attr("node", node(ID, Package)). [25-5*ID, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(ID, ID), compiler_id(ID), attr("node", node(ID, Package)). [25-5*ID, true]

% node(ID, _), split build dependencies
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, ID)), attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
#heuristic version_weight(node(ID, Package), ID) : pkg_fact(Package, version_declared(Version, ID)), attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, ID)), attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
#heuristic node_target_weight(node(ID, Package), ID) : attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(ID, ID), compiler_id(ID), attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]

lib/spack/spack/solver/heuristic_separate.lp (new file, 24 lines)
@@ -0,0 +1,24 @@
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

%=============================================================================
% Heuristic to speed-up solves (node with ID > 0)
%=============================================================================

% node(ID, _)
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic version_weight(node(ID, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]

% node(ID, _), split build dependencies
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic version_weight(node(ID, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]

@@ -112,50 +112,49 @@
    "UnsatisfiableDependencySpecError",
    "AmbiguousHashError",
    "InvalidHashError",
    "RedundantSpecError",
    "SpecDeprecatedError",
]

#: Valid pattern for an identifier in Spack

identifier_re = r"\w[\w-]*"
IDENTIFIER_RE = r"\w[\w-]*"

compiler_color = "@g"  #: color for highlighting compilers
version_color = "@c"  #: color for highlighting versions
architecture_color = "@m"  #: color for highlighting architectures
enabled_variant_color = "@B"  #: color for highlighting enabled variants
disabled_variant_color = "r"  #: color for highlighting disabled variants
dependency_color = "@."  #: color for highlighting dependencies
hash_color = "@K"  #: color for highlighting package hashes
COMPILER_COLOR = "@g"  #: color for highlighting compilers
VERSION_COLOR = "@c"  #: color for highlighting versions
ARCHITECTURE_COLOR = "@m"  #: color for highlighting architectures
ENABLED_VARIANT_COLOR = "@B"  #: color for highlighting enabled variants
DISABLED_VARIANT_COLOR = "r"  #: color for highlighting disabled variants
DEPENDENCY_COLOR = "@."  #: color for highlighting dependencies
HASH_COLOR = "@K"  #: color for highlighting package hashes

#: This map determines the coloring of specs when using color output.
#: We make the fields different colors to enhance readability.
#: See llnl.util.tty.color for descriptions of the color codes.
color_formats = {
    "%": compiler_color,
    "@": version_color,
    "=": architecture_color,
    "+": enabled_variant_color,
    "~": disabled_variant_color,
    "^": dependency_color,
    "#": hash_color,
COLOR_FORMATS = {
    "%": COMPILER_COLOR,
    "@": VERSION_COLOR,
    "=": ARCHITECTURE_COLOR,
    "+": ENABLED_VARIANT_COLOR,
    "~": DISABLED_VARIANT_COLOR,
    "^": DEPENDENCY_COLOR,
    "#": HASH_COLOR,
}

#: Regex used for splitting by spec field separators.
#: These need to be escaped to avoid metacharacters in
#: ``color_formats.keys()``.
_separators = "[\\%s]" % "\\".join(color_formats.keys())
#: ``COLOR_FORMATS.keys()``.
_SEPARATORS = "[\\%s]" % "\\".join(COLOR_FORMATS.keys())

#: Default format for Spec.format(). This format can be round-tripped, so that:
#: Spec(Spec("string").format()) == Spec("string")
default_format = (
DEFAULT_FORMAT = (
    "{name}{@versions}"
    "{%compiler.name}{@compiler.versions}{compiler_flags}"
    "{variants}{arch=architecture}{/abstract_hash}"
)

#: Display format, which eliminates extra `@=` in the output, for readability.
display_format = (
DISPLAY_FORMAT = (
    "{name}{@version}"
    "{%compiler.name}{@compiler.version}{compiler_flags}"
    "{variants}{arch=architecture}{/abstract_hash}"
@@ -187,7 +186,7 @@ class InstallStatus(enum.Enum):

def colorize_spec(spec):
    """Returns a spec colorized according to the colors specified in
    color_formats."""
    COLOR_FORMATS."""

    class insert_color:
        def __init__(self):
@@ -200,9 +199,9 @@ def __call__(self, match):
                return clr.cescape(sep)
            self.last = sep

            return "%s%s" % (color_formats[sep], clr.cescape(sep))
            return "%s%s" % (COLOR_FORMATS[sep], clr.cescape(sep))

    return clr.colorize(re.sub(_separators, insert_color(), str(spec)) + "@.")
    return clr.colorize(re.sub(_SEPARATORS, insert_color(), str(spec)) + "@.")


@lang.lazy_lexicographic_ordering
@@ -985,16 +984,14 @@ def __iter__(self):
    def __len__(self):
        return len(self.edges)

    def add(self, edge):
        """Adds a new edge to this object.

        Args:
            edge (DependencySpec): edge to be added
        """
    def add(self, edge: DependencySpec):
        key = edge.spec.name if self.store_by_child else edge.parent.name
        current_list = self.edges.setdefault(key, [])
        current_list.append(edge)
        current_list.sort(key=_sort_by_dep_types)
        if key in self.edges:
            lst = self.edges[key]
            lst.append(edge)
            lst.sort(key=_sort_by_dep_types)
        else:
            self.edges[key] = [edge]

    def __str__(self):
        return "{deps: %s}" % ", ".join(str(d) for d in sorted(self.values()))
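The new add() relies on dict.setdefault in place of the explicit if/else branch; the same pattern in isolation:

    edges = {}

    def add_edge(key, edge):
        lst = edges.setdefault(key, [])  # creates the bucket on first use
        lst.append(edge)
        lst.sort()

    add_edge("zlib", "build")
    add_edge("zlib", "link")
    assert edges["zlib"] == ["build", "link"]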
@@ -1927,19 +1924,15 @@ def _lookup_hash(self):
        store, or finally, binary caches."""
        import spack.environment

        matches = []
        active_env = spack.environment.active_environment()

        if active_env:
            env_matches = active_env.get_by_hash(self.abstract_hash) or []
            matches = [m for m in env_matches if m._satisfies(self)]
        if not matches:
            db_matches = spack.store.STORE.db.get_by_hash(self.abstract_hash) or []
            matches = [m for m in db_matches if m._satisfies(self)]
        if not matches:
            query = spack.binary_distribution.BinaryCacheQuery(True)
            remote_matches = query("/" + self.abstract_hash) or []
            matches = [m for m in remote_matches if m._satisfies(self)]
        # First env, then store, then binary cache
        matches = (
            (active_env.all_matching_specs(self) if active_env else [])
            or spack.store.STORE.db.query(self, installed=any)
            or spack.binary_distribution.BinaryCacheQuery(True)(self)
        )

        if not matches:
            raise InvalidHashError(self, self.abstract_hash)

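The rewritten _lookup_hash chains the three sources with `or`, so a later source is only queried when the earlier ones return nothing; the short-circuit in isolation:

    def from_env():
        return []  # nothing in the active environment

    def from_store():
        return ["installed-spec"]

    matches = from_env() or from_store() or ["binary-cache-spec"]
    assert matches == ["installed-spec"]  # the binary cache is never consulted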
@@ -1960,19 +1953,17 @@ def lookup_hash(self):
        spec = self.copy(deps=False)
        # root spec is replaced
        if spec.abstract_hash:
            new = self._lookup_hash()
            spec._dup(new)
            spec._dup(self._lookup_hash())
            return spec

        # Get dependencies that need to be replaced
        for node in self.traverse(root=False):
            if node.abstract_hash:
                new = node._lookup_hash()
                spec._add_dependency(new, deptypes=(), virtuals=())
                spec._add_dependency(node._lookup_hash(), deptypes=(), virtuals=())

        # reattach nodes that were not otherwise satisfied by new dependencies
        for node in self.traverse(root=False):
            if not any(n._satisfies(node) for n in spec.traverse()):
            if not any(n.satisfies(node) for n in spec.traverse()):
                spec._add_dependency(node.copy(), deptypes=(), virtuals=())

        return spec
@@ -1985,9 +1976,7 @@ def replace_hash(self):
        if not any(node for node in self.traverse(order="post") if node.abstract_hash):
            return

        spec_by_hash = self.lookup_hash()

        self._dup(spec_by_hash)
        self._dup(self.lookup_hash())

    def to_node_dict(self, hash=ht.dag_hash):
        """Create a dictionary representing the state of this Spec.
@@ -3723,15 +3712,19 @@ def intersects(self, other: "Spec", deps: bool = True) -> bool:
        """
        other = self._autospec(other)

        lhs = self.lookup_hash() or self
        rhs = other.lookup_hash() or other

        return lhs._intersects(rhs, deps)

    def _intersects(self, other: "Spec", deps: bool = True) -> bool:
        if other.concrete and self.concrete:
            return self.dag_hash() == other.dag_hash()

        self_hash = self.dag_hash() if self.concrete else self.abstract_hash
        other_hash = other.dag_hash() if other.concrete else other.abstract_hash

        if (
            self_hash
            and other_hash
            and not (self_hash.startswith(other_hash) or other_hash.startswith(self_hash))
        ):
            return False

        # If the names are different, we need to consider virtuals
        if self.name != other.name and self.name and other.name:
            if self.virtual and other.virtual:
@@ -3791,19 +3784,8 @@ def _intersects(self, other: "Spec", deps: bool = True) -> bool:
        # If we need to descend into dependencies, do it, otherwise we're done.
        if deps:
            return self._intersects_dependencies(other)
        else:
            return True

    def satisfies(self, other, deps=True):
        """
        This checks constraints on common dependencies against each other.
        """
        other = self._autospec(other)

        lhs = self.lookup_hash() or self
        rhs = other.lookup_hash() or other

        return lhs._satisfies(rhs, deps=deps)
        return True

    def _intersects_dependencies(self, other):
        if not other._dependencies or not self._dependencies:
@@ -3840,7 +3822,7 @@ def _intersects_dependencies(self, other):

        return True

    def _satisfies(self, other: "Spec", deps: bool = True) -> bool:
    def satisfies(self, other: "Spec", deps: bool = True) -> bool:
        """Return True if all concrete specs matching self also match other, otherwise False.

        Args:
@@ -3855,6 +3837,13 @@ def _satisfies(self, other: "Spec", deps: bool = True) -> bool:
        # objects.
        return self.concrete and self.dag_hash() == other.dag_hash()

        # If the right-hand side has an abstract hash, make sure it's a prefix of the
        # left-hand side's (abstract) hash.
        if other.abstract_hash:
            compare_hash = self.dag_hash() if self.concrete else self.abstract_hash
            if not compare_hash or not compare_hash.startswith(other.abstract_hash):
                return False

        # If the names are different, we need to consider virtuals
        if self.name != other.name and self.name and other.name:
            # A concrete provider can satisfy a virtual dependency.
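The new abstract-hash check in satisfies() above is plain string-prefix matching on the DAG hash:

    full_hash = "abcdef1234567890abcdef1234567890"
    assert full_hash.startswith("abcdef")      # an abstract hash "abcdef" can match this node
    assert not full_hash.startswith("abcdee")  # a non-prefix hash is rejected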
@@ -4231,9 +4220,7 @@ def eq_node(self, other):
    def _cmp_iter(self):
        """Lazily yield components of self for comparison."""

        cmp_spec = self.lookup_hash() or self

        for item in cmp_spec._cmp_node():
        for item in self._cmp_node():
            yield item

        # This needs to be in _cmp_iter so that no specs with different process hashes
@@ -4244,10 +4231,10 @@ def _cmp_iter(self):
        # TODO: they exist for speed. We should benchmark whether it's really worth
        # TODO: having two types of hashing now that we use `json` instead of `yaml` for
        # TODO: spec hashing.
        yield cmp_spec.process_hash() if cmp_spec.concrete else None
        yield self.process_hash() if self.concrete else None

        def deps():
            for dep in sorted(itertools.chain.from_iterable(cmp_spec._dependencies.values())):
            for dep in sorted(itertools.chain.from_iterable(self._dependencies.values())):
                yield dep.spec.name
                yield tuple(sorted(dep.deptypes))
                yield hash(dep.spec)
@@ -4257,7 +4244,7 @@ def deps():
    def colorized(self):
        return colorize_spec(self)

    def format(self, format_string=default_format, **kwargs):
    def format(self, format_string=DEFAULT_FORMAT, **kwargs):
        r"""Prints out particular pieces of a spec, depending on what is
        in the format string.

@@ -4336,7 +4323,7 @@ def format(self, format_string=default_format, **kwargs):
        def write(s, c=None):
            f = clr.cescape(s)
            if c is not None:
                f = color_formats[c] + f + "@."
                f = COLOR_FORMATS[c] + f + "@."
            clr.cwrite(f, stream=out, color=color)

        def write_attribute(spec, attribute, color):
@@ -4535,7 +4522,7 @@ def tree(self, **kwargs):
        status_fn = kwargs.pop("status_fn", False)
        cover = kwargs.pop("cover", "nodes")
        indent = kwargs.pop("indent", 0)
        fmt = kwargs.pop("format", default_format)
        fmt = kwargs.pop("format", DEFAULT_FORMAT)
        prefix = kwargs.pop("prefix", None)
        show_types = kwargs.pop("show_types", False)
        deptypes = kwargs.pop("deptypes", "all")
@@ -5343,14 +5330,6 @@ class NoSuchSpecFileError(SpecFilenameError):
    """Raised when a spec file doesn't exist."""


class RedundantSpecError(spack.error.SpecError):
    def __init__(self, spec, addition):
        super().__init__(
            "Attempting to add %s to spec %s which is already concrete."
            " This is likely the result of adding to a spec specified by hash." % (addition, spec)
        )


class SpecFormatStringError(spack.error.SpecError):
    """Called for errors in Spec format strings."""


@@ -97,8 +97,10 @@ def remove(self, spec):
            msg += "Either %s is not in %s or %s is " % (spec, self.name, spec)
            msg += "expanded from a matrix and cannot be removed directly."
            raise SpecListError(msg)
        assert len(remove) == 1
        self.yaml_list.remove(remove[0])

        # Remove may contain more than one string representation of the same spec
        for item in remove:
            self.yaml_list.remove(item)

        # invalidate cache variables when we change the list
        self._expanded_list = None
@@ -197,7 +199,9 @@ def _expand_matrix_constraints(matrix_config):
    for combo in itertools.product(*expanded_rows):
        # Construct a combined spec to test against excludes
        flat_combo = [constraint for constraint_list in combo for constraint in constraint_list]
        flat_combo = [Spec(x) for x in flat_combo]

        # Resolve abstract hashes so we can exclude by their concrete properties
        flat_combo = [Spec(x).lookup_hash() for x in flat_combo]

        test_spec = flat_combo[0].copy()
        for constraint in flat_combo[1:]:

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import platform
import sys

import pytest

@@ -223,6 +224,7 @@ def test_concretize_target_ranges(root_target_range, dep_target_range, result, m
        (["21.11", "21.9"], None, False),
    ],
)
@pytest.mark.skipif(sys.platform == "win32", reason="Cray does not use windows")
def test_cray_platform_detection(versions, default, expected, tmpdir, monkeypatch, working_env):
    ex_path = str(tmpdir.join("fake_craype_dir"))
    fs.mkdirp(ex_path)

@@ -16,8 +16,9 @@
import spack.package_base
import spack.spec
import spack.util.spack_yaml as syaml
from spack.build_environment import _static_to_shared_library, determine_number_of_jobs, dso_suffix
from spack.build_environment import _static_to_shared_library, dso_suffix
from spack.paths import build_env_path
from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable
from spack.util.path import Path, convert_to_platform_path
@@ -442,7 +443,7 @@ def test_parallel_false_is_not_propagating(default_mock_concretization):

    spack.build_environment.set_module_variables_for_package(s["b"].package)
    assert s["b"].package.module.make_jobs == spack.build_environment.determine_number_of_jobs(
        s["b"].package.parallel
        parallel=s["b"].package.parallel
    )


@@ -474,28 +475,62 @@ def test_setting_dtags_based_on_config(config_setting, expected_flag, config, mo
|
||||
|
||||
def test_build_jobs_sequential_is_sequential():
|
||||
assert (
|
||||
determine_number_of_jobs(parallel=False, command_line=8, config_default=8, max_cpus=8) == 1
|
||||
determine_number_of_jobs(
|
||||
parallel=False,
|
||||
max_cpus=8,
|
||||
config=spack.config.Configuration(
|
||||
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 8}}),
|
||||
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 8}}),
|
||||
),
|
||||
)
|
||||
== 1
|
||||
)
|
||||
|
||||
|
||||
def test_build_jobs_command_line_overrides():
|
||||
assert (
|
||||
determine_number_of_jobs(parallel=True, command_line=10, config_default=1, max_cpus=1)
|
||||
determine_number_of_jobs(
|
||||
parallel=True,
|
||||
max_cpus=1,
|
||||
config=spack.config.Configuration(
|
||||
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}),
|
||||
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}}),
|
||||
),
|
||||
)
|
||||
== 10
|
||||
)
|
||||
assert (
|
||||
determine_number_of_jobs(parallel=True, command_line=10, config_default=100, max_cpus=100)
|
||||
determine_number_of_jobs(
|
||||
parallel=True,
|
||||
max_cpus=100,
|
||||
config=spack.config.Configuration(
|
||||
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}),
|
||||
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 100}}),
|
||||
),
|
||||
)
|
||||
== 10
|
||||
)
|
||||
|
||||
|
||||
def test_build_jobs_defaults():
|
||||
assert (
|
||||
determine_number_of_jobs(parallel=True, command_line=None, config_default=1, max_cpus=10)
|
||||
determine_number_of_jobs(
|
||||
parallel=True,
|
||||
max_cpus=10,
|
||||
config=spack.config.Configuration(
|
||||
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}})
|
||||
),
|
||||
)
|
||||
== 1
|
||||
)
|
||||
assert (
|
||||
determine_number_of_jobs(parallel=True, command_line=None, config_default=100, max_cpus=10)
|
||||
determine_number_of_jobs(
|
||||
parallel=True,
|
||||
max_cpus=10,
|
||||
config=spack.config.Configuration(
|
||||
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 100}})
|
||||
),
|
||||
)
|
||||
== 10
|
||||
)
|
||||
|
||||
|
||||
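The three tests above pin down the reworked determine_number_of_jobs, now imported from spack.util.cpus and driven by a Configuration object instead of the old command_line/config_default keywords. A minimal usage sketch, assuming only the behavior these tests assert (the scope names and build_jobs values below are illustrative):

    import spack.config
    from spack.util.cpus import determine_number_of_jobs

    # Highest-precedence scope first, mirroring how the tests stack a
    # command-line scope over the defaults scope.
    config = spack.config.Configuration(
        spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}),
        spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 4}}),
    )

    # parallel=False always yields a single job.
    assert determine_number_of_jobs(parallel=False, max_cpus=8, config=config) == 1

    # An explicit command-line value wins even over the max_cpus cap,
    # exactly as test_build_jobs_command_line_overrides asserts.
    assert determine_number_of_jobs(parallel=True, max_cpus=1, config=config) == 10
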
@@ -31,13 +31,16 @@ def test_fetch_missing_cache(tmpdir, _fetch_method):
 @pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
 def test_fetch(tmpdir, _fetch_method):
     """Ensure a fetch after expanding is effectively a no-op."""
-    testpath = str(tmpdir)
-    cache = os.path.join(testpath, "cache.tar.gz")
+    cache_dir = tmpdir.join("cache")
+    stage_dir = tmpdir.join("stage")
+    mkdirp(cache_dir)
+    mkdirp(stage_dir)
+    cache = os.path.join(cache_dir, "cache.tar.gz")
     touch(cache)
     url = url_util.path_to_file_url(cache)
     with spack.config.override("config:url_fetch_method", _fetch_method):
         fetcher = CacheURLFetchStrategy(url=url)
-        with Stage(fetcher, path=testpath) as stage:
+        with Stage(fetcher, path=str(stage_dir)) as stage:
             source_path = stage.source_path
             mkdirp(source_path)
             fetcher.fetch()

@@ -42,45 +42,36 @@ def define_plat_exe(exe):
     return exe


-def test_find_external_single_package(mock_executable, executables_found, _platform_executables):
-    pkgs_to_check = [spack.repo.PATH.get_pkg_class("cmake")]
+@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850")
+def test_find_external_single_package(mock_executable):
     cmake_path = mock_executable("cmake", output="echo cmake version 1.foo")
-    executables_found({str(cmake_path): define_plat_exe("cmake")})
+    search_dir = cmake_path.parent.parent

-    pkg_to_entries = spack.detection.by_executable(pkgs_to_check)
+    specs_by_package = spack.detection.by_path(["cmake"], path_hints=[str(search_dir)])

-    pkg, entries = next(iter(pkg_to_entries.items()))
-    single_entry = next(iter(entries))
-
-    assert single_entry.spec == Spec("cmake@1.foo")
+    assert len(specs_by_package) == 1 and "cmake" in specs_by_package
+    detected_spec = specs_by_package["cmake"]
+    assert len(detected_spec) == 1 and detected_spec[0].spec == Spec("cmake@1.foo")


-def test_find_external_two_instances_same_package(
-    mock_executable, executables_found, _platform_executables
-):
-    pkgs_to_check = [spack.repo.PATH.get_pkg_class("cmake")]
-
+def test_find_external_two_instances_same_package(mock_executable, _platform_executables):
     # Each of these cmake instances is created in a different prefix
     # In Windows, quoted strings are echo'd with quotes included
     # we need to avoid that for proper regex.
-    cmake_path1 = mock_executable(
-        "cmake", output="echo cmake version 1.foo", subdir=("base1", "bin")
-    )
-    cmake_path2 = mock_executable(
-        "cmake", output="echo cmake version 3.17.2", subdir=("base2", "bin")
-    )
-    cmake_exe = define_plat_exe("cmake")
-    executables_found({str(cmake_path1): cmake_exe, str(cmake_path2): cmake_exe})
+    cmake1 = mock_executable("cmake", output="echo cmake version 1.foo", subdir=("base1", "bin"))
+    cmake2 = mock_executable("cmake", output="echo cmake version 3.17.2", subdir=("base2", "bin"))
+    search_paths = [str(cmake1.parent.parent), str(cmake2.parent.parent)]

-    pkg_to_entries = spack.detection.by_executable(pkgs_to_check)
+    finder = spack.detection.path.ExecutablesFinder()
+    detected_specs = finder.find(pkg_name="cmake", initial_guess=search_paths)

-    pkg, entries = next(iter(pkg_to_entries.items()))
-    spec_to_path = dict((e.spec, e.prefix) for e in entries)
+    assert len(detected_specs) == 2
+    spec_to_path = {e.spec: e.prefix for e in detected_specs}
     assert spec_to_path[Spec("cmake@1.foo")] == (
-        spack.detection.executable_prefix(os.path.dirname(cmake_path1))
+        spack.detection.executable_prefix(str(cmake1.parent))
     )
     assert spec_to_path[Spec("cmake@3.17.2")] == (
-        spack.detection.executable_prefix(os.path.dirname(cmake_path2))
+        spack.detection.executable_prefix(str(cmake2.parent))
     )


@@ -112,23 +103,6 @@ def test_get_executables(working_env, mock_executable):
 external = SpackCommand("external")


-def test_find_external_cmd(mutable_config, working_env, mock_executable, _platform_executables):
-    """Test invoking 'spack external find' with additional package arguments,
-    which restricts the set of packages that Spack looks for.
-    """
-    cmake_path1 = mock_executable("cmake", output="echo cmake version 1.foo")
-    prefix = os.path.dirname(os.path.dirname(cmake_path1))
-
-    os.environ["PATH"] = os.pathsep.join([os.path.dirname(cmake_path1)])
-    external("find", "cmake")
-
-    pkgs_cfg = spack.config.get("packages")
-    cmake_cfg = pkgs_cfg["cmake"]
-    cmake_externals = cmake_cfg["externals"]
-
-    assert {"spec": "cmake@1.foo", "prefix": prefix} in cmake_externals
-
-
 def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_executable):
     """When the user invokes 'spack external find --not-buildable', the config
     for any package where Spack finds an external version should be marked as

@@ -138,50 +112,29 @@ def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_execu
     os.environ["PATH"] = os.pathsep.join([os.path.dirname(cmake_path1)])
     external("find", "--not-buildable", "cmake")
     pkgs_cfg = spack.config.get("packages")
     assert "cmake" in pkgs_cfg
     assert not pkgs_cfg["cmake"]["buildable"]


-def test_find_external_cmd_full_repo(
-    mutable_config, working_env, mock_executable, mutable_mock_repo, _platform_executables
-):
-    """Test invoking 'spack external find' with no additional arguments, which
-    iterates through each package in the repository.
-    """
-    exe_path1 = mock_executable("find-externals1-exe", output="echo find-externals1 version 1.foo")
-    prefix = os.path.dirname(os.path.dirname(exe_path1))
-    os.environ["PATH"] = os.pathsep.join([os.path.dirname(exe_path1)])
-    external("find", "--all")
-
-    pkgs_cfg = spack.config.get("packages")
-    pkg_cfg = pkgs_cfg["find-externals1"]
-    pkg_externals = pkg_cfg["externals"]
-
-    assert {"spec": "find-externals1@1.foo", "prefix": prefix} in pkg_externals
+@pytest.mark.parametrize(
+    "names,tags,exclude,expected",
+    [
+        # find --all
+        (None, ["detectable"], [], ["find-externals1"]),
+        # find --all --exclude find-externals1
+        (None, ["detectable"], ["find-externals1"], []),
+        # find cmake (and cmake is not detectable)
+        (["cmake"], ["detectable"], [], []),
+    ],
+)
+def test_package_selection(names, tags, exclude, expected, mutable_mock_repo):
+    """Tests various cases of selecting packages"""
+    # In the mock repo we only have 'find-externals1' that is detectable
+    result = spack.cmd.external.packages_to_search_for(names=names, tags=tags, exclude=exclude)
+    assert set(result) == set(expected)


 def test_find_external_cmd_exclude(
     mutable_config, working_env, mock_executable, mutable_mock_repo, _platform_executables
 ):
     """Test invoking 'spack external find --all --exclude', to ensure arbitrary
     external packages can be ignored.
     """
     exe_path1 = mock_executable("find-externals1-exe", output="echo find-externals1 version 1.foo")
     os.environ["PATH"] = os.pathsep.join([os.path.dirname(exe_path1)])
     external("find", "--all", "--exclude=find-externals1")

     pkgs_cfg = spack.config.get("packages")

     assert "find-externals1" not in pkgs_cfg.keys()


-def test_find_external_no_manifest(
-    mutable_config,
-    working_env,
-    mock_executable,
-    mutable_mock_repo,
-    _platform_executables,
-    monkeypatch,
-):
+def test_find_external_no_manifest(mutable_config, working_env, mutable_mock_repo, monkeypatch):
     """The user runs 'spack external find'; the default path for storing
     manifest files does not exist. Ensure that the command does not
     fail.

@@ -194,13 +147,7 @@ def test_find_external_no_manifest(


 def test_find_external_empty_default_manifest_dir(
-    mutable_config,
-    working_env,
-    mock_executable,
-    mutable_mock_repo,
-    _platform_executables,
-    tmpdir,
-    monkeypatch,
+    mutable_config, working_env, mutable_mock_repo, tmpdir, monkeypatch
 ):
     """The user runs 'spack external find'; the default path for storing
     manifest files exists but is empty. Ensure that the command does not

@@ -215,13 +162,7 @@ def test_find_external_empty_default_manifest_dir(
 @pytest.mark.not_on_windows("Can't chmod on Windows")
 @pytest.mark.skipif(getuid() == 0, reason="user is root")
 def test_find_external_manifest_with_bad_permissions(
-    mutable_config,
-    working_env,
-    mock_executable,
-    mutable_mock_repo,
-    _platform_executables,
-    tmpdir,
-    monkeypatch,
+    mutable_config, working_env, mutable_mock_repo, tmpdir, monkeypatch
 ):
     """The user runs 'spack external find'; the default path for storing
     manifest files exists but with insufficient permissions. Check that

@@ -262,12 +203,7 @@ def fail():


 def test_find_external_nonempty_default_manifest_dir(
-    mutable_database,
-    mutable_mock_repo,
-    _platform_executables,
-    tmpdir,
-    monkeypatch,
-    directory_with_manifest,
+    mutable_database, mutable_mock_repo, tmpdir, monkeypatch, directory_with_manifest
 ):
     """The user runs 'spack external find'; the default manifest directory
     contains a manifest file. Ensure that the specs are read.

@@ -312,6 +248,7 @@ def test_list_detectable_packages(mutable_config, mutable_mock_repo):
     assert external.returncode == 0


+@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850")
 def test_packages_yaml_format(mock_executable, mutable_config, monkeypatch, _platform_executables):
     # Prepare an environment to detect a fake gcc
     gcc_exe = mock_executable("gcc", output="echo 4.2.1")

@@ -337,11 +274,8 @@ def test_packages_yaml_format(mock_executable, mutable_config, monkeypatch, _pla


 def test_overriding_prefix(mock_executable, mutable_config, monkeypatch, _platform_executables):
-    # Prepare an environment to detect a fake gcc that
-    # override its external prefix
     gcc_exe = mock_executable("gcc", output="echo 4.2.1")
-    prefix = os.path.dirname(gcc_exe)
-    monkeypatch.setenv("PATH", prefix)
+    search_dir = gcc_exe.parent

     @classmethod
     def _determine_variants(cls, exes, version_str):

@@ -350,18 +284,17 @@ def _determine_variants(cls, exes, version_str):
     gcc_cls = spack.repo.PATH.get_pkg_class("gcc")
     monkeypatch.setattr(gcc_cls, "determine_variants", _determine_variants)

-    # Find the external spec
-    external("find", "gcc")
+    finder = spack.detection.path.ExecutablesFinder()
+    detected_specs = finder.find(pkg_name="gcc", initial_guess=[str(search_dir)])

-    # Check entries in 'packages.yaml'
-    packages_yaml = spack.config.get("packages")
-    assert "gcc" in packages_yaml
-    assert "externals" in packages_yaml["gcc"]
-    externals = packages_yaml["gcc"]["externals"]
-    assert len(externals) == 1
-    assert externals[0]["prefix"] == os.path.sep + os.path.join("opt", "gcc", "bin")
+    assert len(detected_specs) == 1
+
+    gcc = detected_specs[0].spec
+    assert gcc.name == "gcc"
+    assert gcc.external_path == os.path.sep + os.path.join("opt", "gcc", "bin")


+@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850")
 def test_new_entries_are_reported_correctly(
     mock_executable, mutable_config, monkeypatch, _platform_executables
 ):

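The detection tests above replace spack.detection.by_executable with path-based detection. A short sketch of the two entry points the new tests exercise, assuming the signatures shown in the diff; the search directories are hypothetical placeholders:

    import spack.detection
    import spack.detection.path

    # High-level entry point: detect the named packages under the given path hints.
    specs_by_package = spack.detection.by_path(["cmake"], path_hints=["/opt/tools"])
    for detected in specs_by_package.get("cmake", []):
        print(detected.spec, detected.prefix)

    # Lower-level finder used by the two-instance test: one package name and
    # an initial list of directories to search.
    finder = spack.detection.path.ExecutablesFinder()
    detected_specs = finder.find(pkg_name="cmake", initial_guess=["/opt/base1", "/opt/base2"])
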
@@ -21,10 +21,11 @@
 import spack.hash_types as ht
 import spack.platforms
 import spack.repo
+import spack.solver.asp
 import spack.variant as vt
 from spack.concretize import find_spec
 from spack.spec import CompilerSpec, Spec
-from spack.version import ver
+from spack.version import Version, ver


 def check_spec(abstract, concrete):

@@ -1598,8 +1599,6 @@ def test_reuse_with_unknown_package_dont_raise(self, tmpdir, monkeypatch):
     )
     @pytest.mark.only_clingo("Original concretizer cannot concretize in rounds")
     def test_best_effort_coconcretize(self, specs, expected):
-        import spack.solver.asp
-
         specs = [Spec(s) for s in specs]
         solver = spack.solver.asp.Solver()
         solver.reuse = False

@@ -1643,8 +1642,6 @@ def test_best_effort_coconcretize(self, specs, expected):
     @pytest.mark.only_clingo("Original concretizer cannot concretize in rounds")
     def test_best_effort_coconcretize_preferences(self, specs, expected_spec, occurances):
         """Test package preferences during coconcretization."""
-        import spack.solver.asp
-
         specs = [Spec(s) for s in specs]
         solver = spack.solver.asp.Solver()
         solver.reuse = False

@@ -1660,8 +1657,6 @@ def test_best_effort_coconcretize_preferences(self, specs, expected_spec, occura

     @pytest.mark.only_clingo("Use case not supported by the original concretizer")
     def test_coconcretize_reuse_and_virtuals(self):
-        import spack.solver.asp
-
         reusable_specs = []
         for s in ["mpileaks ^mpich", "zmpi"]:
             reusable_specs.extend(Spec(s).concretized().traverse(root=True))

@@ -1682,8 +1677,6 @@ def test_misleading_error_message_on_version(self, mutable_database):
         # For this bug to be triggered we need a reusable dependency
         # that is not optimal in terms of optimization scores.
         # We pick an old version of "b"
-        import spack.solver.asp
-
         reusable_specs = [Spec("non-existing-conditional-dep@1.0").concretized()]
         root_spec = Spec("non-existing-conditional-dep@2.0")

@@ -1699,8 +1692,6 @@ def test_misleading_error_message_on_version(self, mutable_database):
     @pytest.mark.only_clingo("Use case not supported by the original concretizer")
     def test_version_weight_and_provenance(self):
         """Test package preferences during coconcretization."""
-        import spack.solver.asp
-
         reusable_specs = [Spec(spec_str).concretized() for spec_str in ("b@0.9", "b@1.0")]
         root_spec = Spec("a foobar=bar")

@@ -1732,8 +1723,6 @@ def test_version_weight_and_provenance(self):
     @pytest.mark.regression("31169")
     @pytest.mark.only_clingo("Use case not supported by the original concretizer")
     def test_not_reusing_incompatible_os_or_compiler(self):
-        import spack.solver.asp
-
         root_spec = Spec("b")
         s = root_spec.concretized()
         wrong_compiler, wrong_os = s.copy(), s.copy()

@@ -1954,7 +1943,7 @@ def test_external_python_extension_find_dependency_from_detection(self, monkeypa
         def find_fake_python(classes, path_hints):
             return {"python": [spack.detection.DetectedPackage(python_spec, prefix=path_hints[0])]}

-        monkeypatch.setattr(spack.detection, "by_executable", find_fake_python)
+        monkeypatch.setattr(spack.detection, "by_path", find_fake_python)
         external_conf = {
             "py-extension1": {
                 "buildable": False,

@@ -2099,6 +2088,29 @@ def test_virtuals_are_annotated_on_edges(self, spec_str, default_mock_concretiza
         edges = spec.edges_to_dependencies(name="callpath")
         assert len(edges) == 1 and edges[0].virtuals == ()

+    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
+    @pytest.mark.db
+    @pytest.mark.parametrize(
+        "spec_str,mpi_name",
+        [("mpileaks", "mpich"), ("mpileaks ^mpich2", "mpich2"), ("mpileaks ^zmpi", "zmpi")],
+    )
+    def test_virtuals_are_reconstructed_on_reuse(self, spec_str, mpi_name, database):
+        """Tests that when we reuse a spec, virtuals on edges are reconstructed correctly"""
+        with spack.config.override("concretizer:reuse", True):
+            spec = Spec(spec_str).concretized()
+        assert spec.installed
+        mpi_edges = spec.edges_to_dependencies(mpi_name)
+        assert len(mpi_edges) == 1
+        assert "mpi" in mpi_edges[0].virtuals
+
+    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
+    def test_dont_define_new_version_from_input_if_checksum_required(self, working_env):
+        os.environ["SPACK_CONCRETIZER_REQUIRE_CHECKSUM"] = "yes"
+        with pytest.raises(spack.error.UnsatisfiableSpecError):
+            # normally spack concretizes to @=3.0 if it's not defined in package.py, except
+            # when checksums are required
+            Spec("a@=3.0").concretized()


 @pytest.fixture()
 def duplicates_test_repository():

@@ -2193,3 +2205,39 @@ def test_solution_without_cycles(self):
         s = Spec("cycle-b").concretized()
         assert s["cycle-a"].satisfies("~cycle")
         assert s["cycle-b"].satisfies("+cycle")
+
+
+@pytest.mark.parametrize(
+    "v_str,v_opts,checksummed",
+    [
+        ("1.2.3", {"sha256": f"{1:064x}"}, True),
+        # it's not about the version being "infinite",
+        # but whether it has a digest
+        ("develop", {"sha256": f"{1:064x}"}, True),
+        # other hash types
+        ("1.2.3", {"checksum": f"{1:064x}"}, True),
+        ("1.2.3", {"md5": f"{1:032x}"}, True),
+        ("1.2.3", {"sha1": f"{1:040x}"}, True),
+        ("1.2.3", {"sha224": f"{1:056x}"}, True),
+        ("1.2.3", {"sha384": f"{1:096x}"}, True),
+        ("1.2.3", {"sha512": f"{1:0128x}"}, True),
+        # no digest key
+        ("1.2.3", {"bogus": f"{1:064x}"}, False),
+        # git version with full commit sha
+        ("1.2.3", {"commit": f"{1:040x}"}, True),
+        (f"{1:040x}=1.2.3", {}, True),
+        # git version with short commit sha
+        ("1.2.3", {"commit": f"{1:07x}"}, False),
+        (f"{1:07x}=1.2.3", {}, False),
+        # git tag is a moving target
+        ("1.2.3", {"tag": "v1.2.3"}, False),
+        ("1.2.3", {"tag": "v1.2.3", "commit": f"{1:07x}"}, False),
+        # git branch is a moving target
+        ("1.2.3", {"branch": "releases/1.2"}, False),
+        # git ref is a moving target
+        ("git.branch=1.2.3", {}, False),
+    ],
+)
+def test_drop_moving_targets(v_str, v_opts, checksummed):
+    v = Version(v_str)
+    assert spack.solver.asp._is_checksummed_version((v, v_opts)) == checksummed

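For reference, a condensed sketch of the helper the new parametrized test drives; _is_checksummed_version is internal to spack.solver.asp and, as the test shows, takes a (version, version_options) tuple. The digest value below is an arbitrary placeholder:

    import spack.solver.asp
    from spack.version import Version

    # A full digest marks a version as checksummed, even a named one like "develop".
    assert spack.solver.asp._is_checksummed_version((Version("develop"), {"sha256": "0" * 64}))

    # Branches and tags are moving targets, so they are not checksummed.
    assert not spack.solver.asp._is_checksummed_version((Version("1.2.3"), {"branch": "releases/1.2"}))
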
@@ -2,6 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import os
 import pathlib

 import pytest

@@ -299,9 +300,14 @@ def test_requirement_adds_version_satisfies(
     assert s1.satisfies("@2.2")


+@pytest.mark.parametrize("require_checksum", (True, False))
 def test_requirement_adds_git_hash_version(
-    concretize_scope, test_repo, mock_git_version_info, monkeypatch
+    require_checksum, concretize_scope, test_repo, mock_git_version_info, monkeypatch, working_env
 ):
+    # A full commit sha is a checksummed version, so this test should pass in both cases
+    if require_checksum:
+        os.environ["SPACK_CONCRETIZER_REQUIRE_CHECKSUM"] = "yes"
+
     repo_path, filename, commits = mock_git_version_info
     monkeypatch.setattr(
         spack.package_base.PackageBase, "git", path_to_file_url(repo_path), raising=False

@@ -277,6 +277,25 @@ def test_add_config_path(mutable_config):
     compilers = spack.config.get("packages")["all"]["compiler"]
     assert "gcc" in compilers

+    # Try quotes to escape brackets
+    path = "config:install_tree:projections:cmake:\
+'{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
+    spack.config.add(path)
+    set_value = spack.config.get("config")["install_tree"]["projections"]["cmake"]
+    assert set_value == "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
+
+    # NOTE:
+    # The config path "config:install_tree:root:<path>" is unique in that it can accept multiple
+    # schemas (such as a dropped "root" component), which is atypical and may lead to passing
+    # tests when the behavior is in reality incorrect.
+    # The config path below is such that no subkey accepts a string as a valid entry in our schema.
+
+    # Try quotes to escape colons
+    path = "config:build_stage:'C:\\path\\to\\config.yaml'"
+    spack.config.add(path)
+    set_value = spack.config.get("config")["build_stage"]
+    assert "C:\\path\\to\\config.yaml" in set_value
+

 @pytest.mark.regression("17543,23259")
 def test_add_config_path_with_enumerated_type(mutable_config):

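A minimal sketch of the quoting behavior those new assertions pin down; the path value is a hypothetical example and assumes a mutable config scope is active:

    import spack.config

    # Single quotes keep the drive-letter colon from being parsed as a
    # config-path separator.
    spack.config.add("config:build_stage:'C:\\path\\to\\stage'")
    assert "C:\\path\\to\\stage" in spack.config.get("config")["build_stage"]
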
@@ -27,7 +27,7 @@
 import llnl.util.lang
 import llnl.util.lock
 import llnl.util.tty as tty
-from llnl.util.filesystem import copy_tree, mkdirp, remove_linked_tree, working_dir
+from llnl.util.filesystem import copy_tree, mkdirp, remove_linked_tree, touchp, working_dir

 import spack.binary_distribution
 import spack.caches

@@ -565,6 +565,8 @@ def mock_repo_path():
     def _pkg_install_fn(pkg, spec, prefix):
         # sanity_check_prefix requires something in the install directory
         mkdirp(prefix.bin)
+        if not os.path.exists(spec.package.build_log_path):
+            touchp(spec.package.build_log_path)


 @pytest.fixture

@@ -13,7 +13,11 @@

 import spack.environment as ev
 import spack.spec
-from spack.environment.environment import SpackEnvironmentViewError, _error_on_nonempty_view_dir
+from spack.environment.environment import (
+    EnvironmentManifestFile,
+    SpackEnvironmentViewError,
+    _error_on_nonempty_view_dir,
+)

 pytestmark = pytest.mark.not_on_windows("Envs are not supported on windows")

@@ -587,33 +591,102 @@ def test_conflicts_with_packages_that_are_not_dependencies(
     assert any(s.satisfies(expected_spec) for s in e.concrete_roots())


-def test_requires_on_virtual_and_potential_providers(tmp_path, mock_packages, config):
+@pytest.mark.regression("39455")
+@pytest.mark.only_clingo("Known failure of the original concretizer")
+@pytest.mark.parametrize(
+    "possible_mpi_spec,unify", [("mpich", False), ("mpich", True), ("zmpi", False), ("zmpi", True)]
+)
+def test_requires_on_virtual_and_potential_providers(
+    possible_mpi_spec, unify, tmp_path, mock_packages, config
+):
     """Tests that in an environment we can add packages explicitly, even though they provide
     a virtual package, and we require the provider of the same virtual to be another package,
     if they are added explicitly by their name.
     """
-    if spack.config.get("config:concretizer") == "original":
-        pytest.xfail("Known failure of the original concretizer")
-
     manifest = tmp_path / "spack.yaml"
     manifest.write_text(
-        """\
+        f"""\
 spack:
   specs:
-  - mpich
+  - {possible_mpi_spec}
   - mpich2
   - mpileaks
   packages:
     mpi:
       require: mpich2
+  concretizer:
+    unify: {unify}
 """
     )
     with ev.Environment(manifest.parent) as e:
         e.concretize()
-        assert e.matching_spec("mpich")
+        assert e.matching_spec(possible_mpi_spec)
         assert e.matching_spec("mpich2")

         mpileaks = e.matching_spec("mpileaks")
         assert mpileaks.satisfies("^mpich2")
         assert mpileaks["mpi"].satisfies("mpich2")
-        assert not mpileaks.satisfies("^mpich")
+        assert not mpileaks.satisfies(f"^{possible_mpi_spec}")
+
+
+@pytest.mark.regression("39387")
+@pytest.mark.parametrize(
+    "spec_str", ["mpileaks +opt", "mpileaks +opt ~shared", "mpileaks ~shared +opt"]
+)
+def test_manifest_file_removal_works_if_spec_is_not_normalized(tmp_path, spec_str):
+    """Tests that we can remove a spec from a manifest file even if its string
+    representation is not normalized.
+    """
+    manifest = tmp_path / "spack.yaml"
+    manifest.write_text(
+        f"""\
+spack:
+  specs:
+  - {spec_str}
+"""
+    )
+    s = spack.spec.Spec(spec_str)
+    spack_yaml = EnvironmentManifestFile(tmp_path)
+    # Doing a round trip str -> Spec -> str normalizes the representation
+    spack_yaml.remove_user_spec(str(s))
+    spack_yaml.flush()
+
+    assert spec_str not in manifest.read_text()
+
+
+@pytest.mark.regression("39387")
+@pytest.mark.parametrize(
+    "duplicate_specs,expected_number",
+    [
+        # Swap variants, versions, etc. add spaces
+        (["foo +bar ~baz", "foo ~baz +bar"], 3),
+        (["foo @1.0 ~baz %gcc", "foo ~baz @1.0%gcc"], 3),
+        # Item 1 and 3 are exactly the same
+        (["zlib +shared", "zlib +shared", "zlib +shared"], 4),
+    ],
+)
+def test_removing_spec_from_manifest_with_exact_duplicates(
+    duplicate_specs, expected_number, tmp_path
+):
+    """Tests that we can remove exact duplicates from a manifest file.
+
+    Note that we can't get in a state with duplicates using only the CLI, but this might happen
+    on user-edited spack.yaml files.
+    """
+    manifest = tmp_path / "spack.yaml"
+    manifest.write_text(
+        f"""\
+spack:
+  specs: [{", ".join(duplicate_specs)} , "zlib"]
+"""
+    )
+
+    with ev.Environment(tmp_path) as env:
+        assert len(env.user_specs) == expected_number
+        env.remove(duplicate_specs[0])
+        env.write()
+
+    assert "+shared" not in manifest.read_text()
+    assert "zlib" in manifest.read_text()
+    with ev.Environment(tmp_path) as env:
+        assert len(env.user_specs) == 1

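A condensed sketch of the manifest round trip the two regression tests above rely on, assuming the EnvironmentManifestFile API shown in the diff (remove_user_spec and flush); the environment directory is hypothetical:

    import pathlib

    import spack.spec
    from spack.environment.environment import EnvironmentManifestFile

    env_dir = pathlib.Path("my-env")  # hypothetical directory containing spack.yaml
    manifest = EnvironmentManifestFile(env_dir)

    # A str -> Spec -> str round trip normalizes the user string, and removal
    # still matches however the spec was spelled in the file.
    normalized = str(spack.spec.Spec("mpileaks +opt ~shared"))
    manifest.remove_user_spec(normalized)
    manifest.flush()
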
@@ -13,7 +13,8 @@

 import pytest

 import llnl.util.filesystem as fs
-from llnl.util.symlink import islink, symlink
+import llnl.util.symlink
+from llnl.util.symlink import SymlinkError, _windows_can_symlink, islink, symlink

 import spack.paths

@@ -150,7 +151,6 @@ def test_multiple_src_file_dest(self, stage):
            fs.install("source/a/*/*", "dest/1")


-@pytest.mark.not_on_windows("Skip test on Windows")
 class TestCopyTree:
     """Tests for ``filesystem.copy_tree``"""

@@ -189,7 +189,7 @@ def test_symlinks_true(self, stage):
     def test_symlinks_true_ignore(self, stage):
         """Test copying when specifying relative paths that should be ignored"""
         with fs.working_dir(str(stage)):
-            ignore = lambda p: p in ["c/d/e", "a"]
+            ignore = lambda p: p in [os.path.join("c", "d", "e"), "a"]
             fs.copy_tree("source", "dest", symlinks=True, ignore=ignore)
             assert not os.path.exists("dest/a")
             assert os.path.exists("dest/c/d")

@@ -231,7 +231,6 @@ def test_parent_dir(self, stage):
            fs.copy_tree("source", "source/sub/directory")


-@pytest.mark.not_on_windows("Skip test on Windows")
 class TestInstallTree:
     """Tests for ``filesystem.install_tree``"""

@@ -275,6 +274,15 @@ def test_symlinks_false(self, stage):
         assert not os.path.islink("dest/2")
         check_added_exe_permissions("source/2", "dest/2")

+    @pytest.mark.skipif(sys.platform == "win32", reason="Broken symlinks not allowed on Windows")
+    def test_allow_broken_symlinks(self, stage):
+        """Test installing with a broken symlink."""
+        with fs.working_dir(str(stage)):
+            symlink("nonexistant.txt", "source/broken", allow_broken_symlinks=True)
+            fs.install_tree("source", "dest", symlinks=True, allow_broken_symlinks=True)
+            assert os.path.islink("dest/broken")
+            assert not os.path.exists(os.readlink("dest/broken"))
+
     def test_glob_src(self, stage):
         """Test using a glob as the source."""

@@ -746,6 +754,7 @@ def test_is_nonsymlink_exe_with_shebang(tmpdir):
         assert not fs.is_nonsymlink_exe_with_shebang("symlink_to_executable_script")


+@pytest.mark.skipif(sys.platform == "win32", reason="Unix-only test.")
 def test_lexists_islink_isdir(tmpdir):
     root = str(tmpdir)

@@ -764,12 +773,12 @@ def test_lexists_islink_isdir(tmpdir):
     with open(file, "wb") as f:
         f.write(b"file")

-    os.symlink("dir", symlink_to_dir)
-    os.symlink("file", symlink_to_file)
-    os.symlink("does_not_exist", dangling_symlink)
-    os.symlink("dangling_symlink", symlink_to_dangling_symlink)
-    os.symlink("symlink_to_dir", symlink_to_symlink_to_dir)
-    os.symlink("symlink_to_file", symlink_to_symlink_to_file)
+    symlink("dir", symlink_to_dir)
+    symlink("file", symlink_to_file)
+    symlink("does_not_exist", dangling_symlink)
+    symlink("dangling_symlink", symlink_to_dangling_symlink)
+    symlink("symlink_to_dir", symlink_to_symlink_to_dir)
+    symlink("symlink_to_file", symlink_to_symlink_to_file)

     assert fs.lexists_islink_isdir(dir) == (True, False, True)
     assert fs.lexists_islink_isdir(file) == (True, False, False)

@@ -781,6 +790,57 @@ def test_lexists_islink_isdir(tmpdir):
     assert fs.lexists_islink_isdir(symlink_to_symlink_to_file) == (True, True, False)


+@pytest.mark.skipif(sys.platform != "win32", reason="For Windows Only")
+@pytest.mark.parametrize("win_can_symlink", [True, False])
+def test_lexists_islink_isdir_windows(tmpdir, monkeypatch, win_can_symlink):
+    """Run on windows without elevated privileges to test junctions and hard links which have
+    different results from the lexists_islink_isdir method.
+    """
+    if win_can_symlink and not _windows_can_symlink():
+        pytest.skip("Cannot test dev mode behavior without dev mode enabled.")
+    with tmpdir.as_cwd():
+        monkeypatch.setattr(llnl.util.symlink, "_windows_can_symlink", lambda: win_can_symlink)
+        dir = str(tmpdir.join("dir"))
+        file = str(tmpdir.join("file"))
+        nonexistent = str(tmpdir.join("does_not_exist"))
+        symlink_to_dir = str(tmpdir.join("symlink_to_dir"))
+        symlink_to_file = str(tmpdir.join("symlink_to_file"))
+        dangling_symlink = str(tmpdir.join("dangling_symlink"))
+        symlink_to_dangling_symlink = str(tmpdir.join("symlink_to_dangling_symlink"))
+        symlink_to_symlink_to_dir = str(tmpdir.join("symlink_to_symlink_to_dir"))
+        symlink_to_symlink_to_file = str(tmpdir.join("symlink_to_symlink_to_file"))
+
+        os.mkdir(dir)
+        assert fs.lexists_islink_isdir(dir) == (True, False, True)
+
+        symlink("dir", symlink_to_dir)
+        assert fs.lexists_islink_isdir(dir) == (True, False, True)
+        assert fs.lexists_islink_isdir(symlink_to_dir) == (True, True, True)
+
+        with open(file, "wb") as f:
+            f.write(b"file")
+        assert fs.lexists_islink_isdir(file) == (True, False, False)
+
+        symlink("file", symlink_to_file)
+        if win_can_symlink:
+            assert fs.lexists_islink_isdir(file) == (True, False, False)
+        else:
+            assert fs.lexists_islink_isdir(file) == (True, True, False)
+        assert fs.lexists_islink_isdir(symlink_to_file) == (True, True, False)
+
+        with pytest.raises(SymlinkError):
+            symlink("does_not_exist", dangling_symlink)
+            symlink("dangling_symlink", symlink_to_dangling_symlink)
+
+        symlink("symlink_to_dir", symlink_to_symlink_to_dir)
+        symlink("symlink_to_file", symlink_to_symlink_to_file)
+
+        assert fs.lexists_islink_isdir(nonexistent) == (False, False, False)
+        assert fs.lexists_islink_isdir(symlink_to_dangling_symlink) == (False, False, False)
+        assert fs.lexists_islink_isdir(symlink_to_symlink_to_dir) == (True, True, True)
+        assert fs.lexists_islink_isdir(symlink_to_symlink_to_file) == (True, True, False)


 class RegisterVisitor(fs.BaseDirectoryVisitor):
     """A directory visitor that keeps track of all visited paths"""

@@ -4,12 +4,14 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import os
+import sys

 import pytest

+import llnl.util.symlink
 from llnl.util.filesystem import mkdirp, touchp, visit_directory_tree, working_dir
 from llnl.util.link_tree import DestinationMergeVisitor, LinkTree, SourceMergeVisitor
-from llnl.util.symlink import islink
+from llnl.util.symlink import _windows_can_symlink, islink, readlink, symlink

 from spack.stage import Stage

@@ -44,77 +46,116 @@ def link_tree(stage):
 def check_file_link(filename, expected_target):
     assert os.path.isfile(filename)
     assert islink(filename)
-    assert os.path.abspath(os.path.realpath(filename)) == os.path.abspath(expected_target)
+    if sys.platform != "win32" or llnl.util.symlink._windows_can_symlink():
+        assert os.path.abspath(os.path.realpath(filename)) == os.path.abspath(expected_target)


 def check_dir(filename):
     assert os.path.isdir(filename)


-def test_merge_to_new_directory(stage, link_tree):
+@pytest.mark.parametrize("run_as_root", [True, False])
+def test_merge_to_new_directory(stage, link_tree, monkeypatch, run_as_root):
+    if sys.platform != "win32":
+        if run_as_root:
+            pass
+        else:
+            pytest.skip("Skipping duplicate test.")
+    elif _windows_can_symlink() or not run_as_root:
+        monkeypatch.setattr(llnl.util.symlink, "_windows_can_symlink", lambda: run_as_root)
+    else:
+        # Skip if trying to run as dev-mode without having dev-mode.
+        pytest.skip("Skipping portion of test which required dev-mode privileges.")
+
     with working_dir(stage.path):
         link_tree.merge("dest")

-        check_file_link("dest/1", "source/1")
-        check_file_link("dest/a/b/2", "source/a/b/2")
-        check_file_link("dest/a/b/3", "source/a/b/3")
-        check_file_link("dest/c/4", "source/c/4")
-        check_file_link("dest/c/d/5", "source/c/d/5")
-        check_file_link("dest/c/d/6", "source/c/d/6")
-        check_file_link("dest/c/d/e/7", "source/c/d/e/7")
+        files = [
+            ("dest/1", "source/1"),
+            ("dest/a/b/2", "source/a/b/2"),
+            ("dest/a/b/3", "source/a/b/3"),
+            ("dest/c/4", "source/c/4"),
+            ("dest/c/d/5", "source/c/d/5"),
+            ("dest/c/d/6", "source/c/d/6"),
+            ("dest/c/d/e/7", "source/c/d/e/7"),
+        ]

-        assert os.path.isabs(os.readlink("dest/1"))
-        assert os.path.isabs(os.readlink("dest/a/b/2"))
-        assert os.path.isabs(os.readlink("dest/a/b/3"))
-        assert os.path.isabs(os.readlink("dest/c/4"))
-        assert os.path.isabs(os.readlink("dest/c/d/5"))
-        assert os.path.isabs(os.readlink("dest/c/d/6"))
-        assert os.path.isabs(os.readlink("dest/c/d/e/7"))
+        for dest, source in files:
+            check_file_link(dest, source)
+            assert os.path.isabs(readlink(dest))

         link_tree.unmerge("dest")

         assert not os.path.exists("dest")


-def test_merge_to_new_directory_relative(stage, link_tree):
+@pytest.mark.parametrize("run_as_root", [True, False])
+def test_merge_to_new_directory_relative(stage, link_tree, monkeypatch, run_as_root):
+    if sys.platform != "win32":
+        if run_as_root:
+            pass
+        else:
+            pytest.skip("Skipping duplicate test.")
+    elif _windows_can_symlink() or not run_as_root:
+        monkeypatch.setattr(llnl.util.symlink, "_windows_can_symlink", lambda: run_as_root)
+    else:
+        # Skip if trying to run as dev-mode without having dev-mode.
+        pytest.skip("Skipping portion of test which required dev-mode privileges.")
+
     with working_dir(stage.path):
         link_tree.merge("dest", relative=True)

-        check_file_link("dest/1", "source/1")
-        check_file_link("dest/a/b/2", "source/a/b/2")
-        check_file_link("dest/a/b/3", "source/a/b/3")
-        check_file_link("dest/c/4", "source/c/4")
-        check_file_link("dest/c/d/5", "source/c/d/5")
-        check_file_link("dest/c/d/6", "source/c/d/6")
-        check_file_link("dest/c/d/e/7", "source/c/d/e/7")
+        files = [
+            ("dest/1", "source/1"),
+            ("dest/a/b/2", "source/a/b/2"),
+            ("dest/a/b/3", "source/a/b/3"),
+            ("dest/c/4", "source/c/4"),
+            ("dest/c/d/5", "source/c/d/5"),
+            ("dest/c/d/6", "source/c/d/6"),
+            ("dest/c/d/e/7", "source/c/d/e/7"),
+        ]

-        assert not os.path.isabs(os.readlink("dest/1"))
-        assert not os.path.isabs(os.readlink("dest/a/b/2"))
-        assert not os.path.isabs(os.readlink("dest/a/b/3"))
-        assert not os.path.isabs(os.readlink("dest/c/4"))
-        assert not os.path.isabs(os.readlink("dest/c/d/5"))
-        assert not os.path.isabs(os.readlink("dest/c/d/6"))
-        assert not os.path.isabs(os.readlink("dest/c/d/e/7"))
+        for dest, source in files:
+            check_file_link(dest, source)
+            # Hard links/junctions are inherently absolute.
+            if sys.platform != "win32" or run_as_root:
+                assert not os.path.isabs(readlink(dest))

         link_tree.unmerge("dest")

         assert not os.path.exists("dest")


-def test_merge_to_existing_directory(stage, link_tree):
+@pytest.mark.parametrize("run_as_root", [True, False])
+def test_merge_to_existing_directory(stage, link_tree, monkeypatch, run_as_root):
+    if sys.platform != "win32":
+        if run_as_root:
+            pass
+        else:
+            pytest.skip("Skipping duplicate test.")
+    elif _windows_can_symlink() or not run_as_root:
+        monkeypatch.setattr(llnl.util.symlink, "_windows_can_symlink", lambda: run_as_root)
+    else:
+        # Skip if trying to run as dev-mode without having dev-mode.
+        pytest.skip("Skipping portion of test which required dev-mode privileges.")
+
     with working_dir(stage.path):
         touchp("dest/x")
         touchp("dest/a/b/y")

         link_tree.merge("dest")

-        check_file_link("dest/1", "source/1")
-        check_file_link("dest/a/b/2", "source/a/b/2")
-        check_file_link("dest/a/b/3", "source/a/b/3")
-        check_file_link("dest/c/4", "source/c/4")
-        check_file_link("dest/c/d/5", "source/c/d/5")
-        check_file_link("dest/c/d/6", "source/c/d/6")
-        check_file_link("dest/c/d/e/7", "source/c/d/e/7")
+        files = [
+            ("dest/1", "source/1"),
+            ("dest/a/b/2", "source/a/b/2"),
+            ("dest/a/b/3", "source/a/b/3"),
+            ("dest/c/4", "source/c/4"),
+            ("dest/c/d/5", "source/c/d/5"),
+            ("dest/c/d/6", "source/c/d/6"),
+            ("dest/c/d/e/7", "source/c/d/e/7"),
+        ]
+        for dest, source in files:
+            check_file_link(dest, source)

         assert os.path.isfile("dest/x")
         assert os.path.isfile("dest/a/b/y")

@@ -124,13 +165,8 @@ def test_merge_to_existing_directory(stage, link_tree):
         assert os.path.isfile("dest/x")
         assert os.path.isfile("dest/a/b/y")

-        assert not os.path.isfile("dest/1")
-        assert not os.path.isfile("dest/a/b/2")
-        assert not os.path.isfile("dest/a/b/3")
-        assert not os.path.isfile("dest/c/4")
-        assert not os.path.isfile("dest/c/d/5")
-        assert not os.path.isfile("dest/c/d/6")
-        assert not os.path.isfile("dest/c/d/e/7")
+        for dest, _ in files:
+            assert not os.path.isfile(dest)


 def test_merge_with_empty_directories(stage, link_tree):

@@ -192,9 +228,9 @@ def test_source_merge_visitor_does_not_follow_symlinked_dirs_at_depth(tmpdir):
         os.mkdir(j("a", "b"))
         os.mkdir(j("a", "b", "c"))
         os.mkdir(j("a", "b", "c", "d"))
-        os.symlink(j("b"), j("a", "symlink_b"))
-        os.symlink(j("c"), j("a", "b", "symlink_c"))
-        os.symlink(j("d"), j("a", "b", "c", "symlink_d"))
+        symlink(j("b"), j("a", "symlink_b"))
+        symlink(j("c"), j("a", "b", "symlink_c"))
+        symlink(j("d"), j("a", "b", "c", "symlink_d"))
         with open(j("a", "b", "c", "d", "file"), "wb"):
             pass

@@ -236,10 +272,11 @@ def test_source_merge_visitor_cant_be_cyclical(tmpdir):
     j = os.path.join
     with tmpdir.as_cwd():
         os.mkdir(j("a"))
-        os.symlink(j("..", "b"), j("a", "symlink_b"))
-        os.symlink(j("symlink_b"), j("a", "symlink_b_b"))
         os.mkdir(j("b"))
-        os.symlink(j("..", "a"), j("b", "symlink_a"))
+
+        symlink(j("..", "b"), j("a", "symlink_b"))
+        symlink(j("symlink_b"), j("a", "symlink_b_b"))
+        symlink(j("..", "a"), j("b", "symlink_a"))

     visitor = SourceMergeVisitor()
     visit_directory_tree(str(tmpdir), visitor)

lib/spack/spack/test/llnl/util/symlink.py (new file, 247 lines)
@@ -0,0 +1,247 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Tests for ``llnl/util/symlink.py``"""
import os
import sys
import tempfile

import pytest

from llnl.util import symlink


def test_symlink_file(tmpdir):
    """Test the symlink.symlink functionality on all operating systems for a file"""
    with tmpdir.as_cwd():
        test_dir = str(tmpdir)
        fd, real_file = tempfile.mkstemp(prefix="real", suffix=".txt", dir=test_dir)
        link_file = str(tmpdir.join("link.txt"))
        assert os.path.exists(link_file) is False
        symlink.symlink(source_path=real_file, link_path=link_file)
        assert os.path.exists(link_file)
        assert symlink.islink(link_file)


def test_symlink_dir(tmpdir):
    """Test the symlink.symlink functionality on all operating systems for a directory"""
    with tmpdir.as_cwd():
        test_dir = str(tmpdir)
        real_dir = os.path.join(test_dir, "real_dir")
        link_dir = os.path.join(test_dir, "link_dir")
        os.mkdir(real_dir)
        symlink.symlink(source_path=real_dir, link_path=link_dir)
        assert os.path.exists(link_dir)
        assert symlink.islink(link_dir)


def test_symlink_source_not_exists(tmpdir):
    """Test the symlink.symlink method for the case where a source path does not exist"""
    with tmpdir.as_cwd():
        test_dir = str(tmpdir)
        real_dir = os.path.join(test_dir, "real_dir")
        link_dir = os.path.join(test_dir, "link_dir")
        with pytest.raises(symlink.SymlinkError):
            symlink.symlink(source_path=real_dir, link_path=link_dir, allow_broken_symlinks=False)


def test_symlink_src_relative_to_link(tmpdir):
    """Test the symlink.symlink functionality where the source value exists relative to the link
    but not relative to the cwd"""
    with tmpdir.as_cwd():
        subdir_1 = tmpdir.join("a")
        subdir_2 = os.path.join(subdir_1, "b")
        link_dir = os.path.join(subdir_1, "c")

        os.mkdir(subdir_1)
        os.mkdir(subdir_2)

        fd, real_file = tempfile.mkstemp(prefix="real", suffix=".txt", dir=subdir_2)
        link_file = os.path.join(subdir_1, "link.txt")

        symlink.symlink(
            source_path=f"b/{os.path.basename(real_file)}",
            link_path=f"a/{os.path.basename(link_file)}",
        )
        assert os.path.exists(link_file)
        assert symlink.islink(link_file)
        # Check dirs
        assert not os.path.lexists(link_dir)
        symlink.symlink(source_path="b", link_path="a/c")
        assert os.path.lexists(link_dir)


def test_symlink_src_not_relative_to_link(tmpdir):
    """Test the symlink.symlink functionality where the source value does not exist relative to
    the link and not relative to the cwd. NOTE that this symlink api call is EXPECTED to raise
    a symlink.SymlinkError exception that we catch."""
    with tmpdir.as_cwd():
        test_dir = str(tmpdir)
        subdir_1 = os.path.join(test_dir, "a")
        subdir_2 = os.path.join(subdir_1, "b")
        link_dir = os.path.join(subdir_1, "c")
        os.mkdir(subdir_1)
        os.mkdir(subdir_2)
        fd, real_file = tempfile.mkstemp(prefix="real", suffix=".txt", dir=subdir_2)
        link_file = str(tmpdir.join("link.txt"))
        # Expected SymlinkError because source path does not exist relative to link path
        with pytest.raises(symlink.SymlinkError):
            symlink.symlink(
                source_path=f"d/{os.path.basename(real_file)}",
                link_path=f"a/{os.path.basename(link_file)}",
                allow_broken_symlinks=False,
            )
        assert not os.path.exists(link_file)
        # Check dirs
        assert not os.path.lexists(link_dir)
        with pytest.raises(symlink.SymlinkError):
            symlink.symlink(source_path="d", link_path="a/c", allow_broken_symlinks=False)
        assert not os.path.lexists(link_dir)


def test_symlink_link_already_exists(tmpdir):
    """Test the symlink.symlink method for the case where a link already exists"""
    with tmpdir.as_cwd():
        test_dir = str(tmpdir)
        real_dir = os.path.join(test_dir, "real_dir")
        link_dir = os.path.join(test_dir, "link_dir")
        os.mkdir(real_dir)
        symlink.symlink(real_dir, link_dir, allow_broken_symlinks=False)
        assert os.path.exists(link_dir)
        with pytest.raises(symlink.SymlinkError):
            symlink.symlink(source_path=real_dir, link_path=link_dir, allow_broken_symlinks=False)


@pytest.mark.skipif(not symlink._windows_can_symlink(), reason="Test requires elevated privileges")
@pytest.mark.skipif(sys.platform != "win32", reason="Test is only for Windows")
def test_symlink_win_file(tmpdir):
    """Check that symlink.symlink makes a symlink file when run with elevated permissions"""
    with tmpdir.as_cwd():
        test_dir = str(tmpdir)
        fd, real_file = tempfile.mkstemp(prefix="real", suffix=".txt", dir=test_dir)
        link_file = str(tmpdir.join("link.txt"))
        symlink.symlink(source_path=real_file, link_path=link_file)
        # Verify that all expected conditions are met
        assert os.path.exists(link_file)
        assert symlink.islink(link_file)
        assert os.path.islink(link_file)
        assert not symlink._windows_is_hardlink(link_file)
        assert not symlink._windows_is_junction(link_file)


@pytest.mark.skipif(not symlink._windows_can_symlink(), reason="Test requires elevated privileges")
@pytest.mark.skipif(sys.platform != "win32", reason="Test is only for Windows")
def test_symlink_win_dir(tmpdir):
    """Check that symlink.symlink makes a symlink dir when run with elevated permissions"""
    with tmpdir.as_cwd():
        test_dir = str(tmpdir)
        real_dir = os.path.join(test_dir, "real")
        link_dir = os.path.join(test_dir, "link")
        os.mkdir(real_dir)
        symlink.symlink(source_path=real_dir, link_path=link_dir)
        # Verify that all expected conditions are met
        assert os.path.exists(link_dir)
        assert symlink.islink(link_dir)
        assert os.path.islink(link_dir)
        assert not symlink._windows_is_hardlink(link_dir)
        assert not symlink._windows_is_junction(link_dir)


@pytest.mark.skipif(sys.platform != "win32", reason="Test is only for Windows")
def test_windows_create_junction(tmpdir):
    """Test the symlink._windows_create_junction method"""
    with tmpdir.as_cwd():
        test_dir = str(tmpdir)
        junction_real_dir = os.path.join(test_dir, "real_dir")
        junction_link_dir = os.path.join(test_dir, "link_dir")
        os.mkdir(junction_real_dir)
        symlink._windows_create_junction(junction_real_dir, junction_link_dir)
        # Verify that all expected conditions are met
        assert os.path.exists(junction_link_dir)
        assert symlink._windows_is_junction(junction_link_dir)
        assert symlink.islink(junction_link_dir)
        assert not os.path.islink(junction_link_dir)


@pytest.mark.skipif(sys.platform != "win32", reason="Test is only for Windows")
def test_windows_create_hard_link(tmpdir):
    """Test the symlink._windows_create_hard_link method"""
    with tmpdir.as_cwd():
        test_dir = str(tmpdir)
        fd, real_file = tempfile.mkstemp(prefix="real", suffix=".txt", dir=test_dir)
        link_file = str(tmpdir.join("link.txt"))
        symlink._windows_create_hard_link(real_file, link_file)
        # Verify that all expected conditions are met
        assert os.path.exists(link_file)
        assert symlink._windows_is_hardlink(real_file)
        assert symlink._windows_is_hardlink(link_file)
        assert symlink.islink(link_file)
        assert not os.path.islink(link_file)


@pytest.mark.skipif(sys.platform != "win32", reason="Test is only for Windows")
def test_windows_create_link_dir(tmpdir):
    """Test the functionality of the windows_create_link method with a directory
    which should result in making a junction.
    """
    with tmpdir.as_cwd():
        test_dir = str(tmpdir)
        real_dir = os.path.join(test_dir, "real")
        link_dir = os.path.join(test_dir, "link")
        os.mkdir(real_dir)
        symlink._windows_create_link(real_dir, link_dir)
        # Verify that all expected conditions are met
        assert os.path.exists(link_dir)
        assert symlink.islink(link_dir)
        assert not symlink._windows_is_hardlink(link_dir)
        assert symlink._windows_is_junction(link_dir)
        assert not os.path.islink(link_dir)


@pytest.mark.skipif(sys.platform != "win32", reason="Test is only for Windows")
def test_windows_create_link_file(tmpdir):
    """Test the functionality of the windows_create_link method with a file
    which should result in the creation of a hard link. It also tests the
    functionality of the symlink islink infrastructure.
    """
    with tmpdir.as_cwd():
        test_dir = str(tmpdir)
        fd, real_file = tempfile.mkstemp(prefix="real", suffix=".txt", dir=test_dir)
        link_file = str(tmpdir.join("link.txt"))
        symlink._windows_create_link(real_file, link_file)
        # Verify that all expected conditions are met
        assert symlink._windows_is_hardlink(link_file)
        assert symlink.islink(link_file)
        assert not symlink._windows_is_junction(link_file)


@pytest.mark.skipif(sys.platform != "win32", reason="Test is only for Windows")
def test_windows_read_link(tmpdir):
    """Makes sure symlink.readlink can read the link source for hard links and
    junctions on windows."""
    with tmpdir.as_cwd():
        real_dir_1 = "real_dir_1"
        real_dir_2 = "real_dir_2"
        link_dir_1 = "link_dir_1"
        link_dir_2 = "link_dir_2"
        os.mkdir(real_dir_1)
        os.mkdir(real_dir_2)

        # Create a file and a directory
        _, real_file_1 = tempfile.mkstemp(prefix="real_1", suffix=".txt", dir=".")
        _, real_file_2 = tempfile.mkstemp(prefix="real_2", suffix=".txt", dir=".")
        link_file_1 = "link_1.txt"
        link_file_2 = "link_2.txt"

        # Make hard link/junction
        symlink._windows_create_hard_link(real_file_1, link_file_1)
        symlink._windows_create_hard_link(real_file_2, link_file_2)
        symlink._windows_create_junction(real_dir_1, link_dir_1)
        symlink._windows_create_junction(real_dir_2, link_dir_2)

        assert symlink.readlink(link_file_1) == os.path.abspath(real_file_1)
        assert symlink.readlink(link_file_2) == os.path.abspath(real_file_2)
        assert symlink.readlink(link_dir_1) == os.path.abspath(real_dir_1)
        assert symlink.readlink(link_dir_2) == os.path.abspath(real_dir_2)

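The new test file above fixes the cross-platform contract of llnl.util.symlink. A condensed usage sketch, assuming only the API the tests exercise (symlink, islink, readlink, SymlinkError, and the allow_broken_symlinks flag); the file names are placeholders:

    import pathlib

    from llnl.util import symlink

    pathlib.Path("real.txt").write_text("data")

    # On Windows without symlink privileges this transparently falls back to a
    # hard link for files (or a junction for directories).
    symlink.symlink(source_path="real.txt", link_path="link.txt")
    assert symlink.islink("link.txt")

    # readlink also resolves hard links and junctions, unlike os.readlink.
    print(symlink.readlink("link.txt"))

    # With allow_broken_symlinks=False, a dangling link raises SymlinkError.
    try:
        symlink.symlink("missing.txt", "broken.txt", allow_broken_symlinks=False)
    except symlink.SymlinkError:
        pass
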
@@ -8,7 +8,7 @@

 import pytest

-from llnl.util.filesystem import resolve_link_target_relative_to_the_link
+from llnl.util.symlink import resolve_link_target_relative_to_the_link

 import spack.mirror
 import spack.repo

@@ -228,6 +228,9 @@ def successful_expand(_class):
     def successful_apply(*args, **kwargs):
         pass

+    def successful_symlink(*args, **kwargs):
+        pass
+
     with Stage("spack-mirror-test") as stage:
         mirror_root = os.path.join(stage.path, "test-mirror")

@@ -235,6 +238,7 @@ def successful_apply(*args, **kwargs):
         monkeypatch.setattr(spack.fetch_strategy.URLFetchStrategy, "expand", successful_expand)
         monkeypatch.setattr(spack.patch, "apply_patch", successful_apply)
         monkeypatch.setattr(spack.caches.MirrorCache, "store", record_store)
+        monkeypatch.setattr(spack.caches.MirrorCache, "symlink", successful_symlink)

         with spack.config.override("config:checksum", False):
             spack.mirror.create(mirror_root, list(spec.traverse()))

@@ -147,8 +147,15 @@ def test_relocate_links(tmpdir):
     own_prefix_path = str(tmpdir.join("prefix_a", "file"))
     dep_prefix_path = str(tmpdir.join("prefix_b", "file"))
+    new_own_prefix_path = str(tmpdir.join("new_prefix_a", "file"))
+    new_dep_prefix_path = str(tmpdir.join("new_prefix_b", "file"))
     system_path = os.path.join(os.path.sep, "system", "path")

     fs.touchp(own_prefix_path)
+    fs.touchp(new_own_prefix_path)
     fs.touchp(dep_prefix_path)
+    fs.touchp(new_dep_prefix_path)

     # Old prefixes to new prefixes
     prefix_to_prefix = OrderedDict(
         [

@@ -1291,3 +1291,38 @@ def test_constrain(factory, lhs_str, rhs_str, result, constrained_str):
     rhs = factory(rhs_str)
     rhs.constrain(lhs)
     assert rhs == factory(constrained_str)
+
+
+def test_abstract_hash_intersects_and_satisfies(default_mock_concretization):
+    concrete: Spec = default_mock_concretization("a")
+    hash = concrete.dag_hash()
+    hash_5 = hash[:5]
+    hash_6 = hash[:6]
+    # abstract hash that doesn't have a common prefix with the others.
+    hash_other = f"{'a' if hash_5[0] == 'b' else 'b'}{hash_5[1:]}"
+
+    abstract_5 = Spec(f"a/{hash_5}")
+    abstract_6 = Spec(f"a/{hash_6}")
+    abstract_none = Spec(f"a/{hash_other}")
+    abstract = Spec("a")
+
+    def assert_subset(a: Spec, b: Spec):
+        assert a.intersects(b) and b.intersects(a) and a.satisfies(b) and not b.satisfies(a)
+
+    def assert_disjoint(a: Spec, b: Spec):
+        assert (
+            not a.intersects(b)
+            and not b.intersects(a)
+            and not a.satisfies(b)
+            and not b.satisfies(a)
+        )
+
+    # left-hand side is more constrained, so its
+    # concretization space is a subset of the right-hand side's
+    assert_subset(concrete, abstract_5)
+    assert_subset(abstract_6, abstract_5)
+    assert_subset(abstract_5, abstract)
+
+    # disjoint concretization space
+    assert_disjoint(abstract_none, concrete)
+    assert_disjoint(abstract_none, abstract_5)

@@ -726,22 +726,31 @@ def test_multiple_specs_with_hash(database, config):

 @pytest.mark.db
 def test_ambiguous_hash(mutable_database, default_mock_concretization, config):
+    """Test that abstract hash ambiguity is delayed until concretization.
+    In the past this ambiguity error would happen during parse time."""
+
+    # This is very sketchy, as manually setting hashes easily breaks invariants
     x1 = default_mock_concretization("a")
     x2 = x1.copy()
     x1._hash = "xyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy"
+    x1._process_hash = "xyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy"
     x2._hash = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
-    mutable_database.add(x1, spack.store.STORE.layout)
-    mutable_database.add(x2, spack.store.STORE.layout)
+    x2._process_hash = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+
+    assert x1 != x2  # doesn't hold when only the dag hash is modified.
+
+    mutable_database.add(x1, directory_layout=None)
+    mutable_database.add(x2, directory_layout=None)

     # ambiguity in first hash character
-    s1 = SpecParser("/x").next_spec()
     with pytest.raises(spack.spec.AmbiguousHashError):
-        s1.lookup_hash()
+        parsed_spec = SpecParser("/x").next_spec()
+        parsed_spec.replace_hash()

     # ambiguity in first hash character AND spec name
-    s2 = SpecParser("a/x").next_spec()
     with pytest.raises(spack.spec.AmbiguousHashError):
-        s2.lookup_hash()
+        parsed_spec = SpecParser("a/x").next_spec()
+        parsed_spec.replace_hash()


 @pytest.mark.db

@@ -172,8 +172,8 @@ def test_escape_double_quotes_in_shell_modifications():
        assert r'set "VAR=$PATH;$ANOTHER_PATH"' in cmds
        assert r'set "QUOTED_VAR="MY_VAL"' in cmds
        cmds = to_validate.shell_modifications(shell="pwsh")
        assert r"$Env:VAR=$PATH;$ANOTHER_PATH" in cmds
        assert r'$Env:QUOTED_VAR="MY_VAL"' in cmds
        assert "$Env:VAR='$PATH;$ANOTHER_PATH'" in cmds
        assert "$Env:QUOTED_VAR='\"MY_VAL\"'" in cmds
    else:
        cmds = to_validate.shell_modifications()
        assert 'export VAR="$PATH:$ANOTHER_PATH"' in cmds

@@ -126,8 +126,8 @@ def test_timer_write():

    deserialized = json.loads(json_buffer.getvalue())
    assert deserialized == {
        "phases": [{"name": "timer", "seconds": 1.0}],
        "total": {"seconds": 3.0},
        "phases": [{"name": "timer", "path": "timer", "seconds": 1.0, "count": 1}],
        "total": 3.0,
    }

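The reworked report stores "total" as a bare number and records "path" and "count" per phase. A minimal consumer sketch of the new shape (field names taken from the test above; the helper is hypothetical):

    import json

    def total_seconds(report: str) -> float:
        data = json.loads(report)
        # "total" is now a plain float, no longer {"seconds": ...}
        return data["total"]

    report = '{"phases": [{"name": "timer", "path": "timer", "seconds": 1.0, "count": 1}], "total": 3.0}'
    assert total_seconds(report) == 3.0
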
@@ -70,7 +70,7 @@ def allowed_archive(path):
    return False if not path else any(path.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)


def _system_untar(archive_file):
def _system_untar(archive_file, remove_archive_file=False):
    """Returns path to unarchived tar file.
    Untars archive via system tar.

@@ -89,6 +89,11 @@ def _system_untar(archive_file):
    tar = which("tar", required=True)
    tar.add_default_arg("-oxf")
    tar(archive_file)
    if remove_archive_file:
        # remove input file to prevent two stage
        # extractions from being treated as exploding
        # archives by the fetcher
        os.remove(archive_file)
    return outfile


@@ -243,13 +248,9 @@ def _win_compressed_tarball_handler(decompressor):
    def unarchive(archive_file):
        # perform intermediate extraction step
        # record name of new archive so we can extract
        # and later clean up
        decomped_tarball = decompressor(archive_file)
        # run tar on newly decompressed archive
        outfile = _system_untar(decomped_tarball)
        # clean intermediate archive to mimic the end result
        # produced by a one-shot decompress/extract
        os.remove(decomped_tarball)
        outfile = _system_untar(decomped_tarball, remove_archive_file=True)
        return outfile

    return unarchive

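The refactor folds the intermediate-file cleanup into _system_untar itself instead of doing it at the call site. A standalone sketch of the two-stage pattern (hypothetical decompress/untar callables, not Spack's API):

    def two_stage_extract(archive_file, decompress, untar):
        # Stage 1: strip the outer compression (e.g. .tar.gz -> .tar).
        tarball = decompress(archive_file)
        # Stage 2: untar, deleting the intermediate tarball so the
        # fetcher never sees two extraction products for one archive.
        return untar(tarball, remove_archive_file=True)
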
@@ -5,6 +5,9 @@

import multiprocessing
import os
from typing import Optional

import spack.config


def cpus_available():
@@ -18,3 +21,36 @@ def cpus_available():
        return len(os.sched_getaffinity(0))  # novermin
    except Exception:
        return multiprocessing.cpu_count()


def determine_number_of_jobs(
    *,
    parallel: bool = False,
    max_cpus: int = cpus_available(),
    config: Optional["spack.config.Configuration"] = None,
) -> int:
    """
    Packages that require sequential builds need 1 job. Otherwise we use the
    number of jobs set on the command line. If not set, then we use the config
    defaults (which are usually set through the builtin config scope), but we
    cap to the number of CPUs available to avoid oversubscription.

    Parameters:
        parallel: true when package supports parallel builds
        max_cpus: maximum number of CPUs to use (defaults to cpus_available())
        config: configuration object (defaults to global config)
    """
    if not parallel:
        return 1

    cfg = config or spack.config.CONFIG

    # Command line overrides all
    try:
        command_line = cfg.get("config:build_jobs", default=None, scope="command_line")
        if command_line is not None:
            return command_line
    except ValueError:
        pass

    return min(max_cpus, cfg.get("config:build_jobs", 16))

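A hedged usage sketch of the new keyword-only API (module path assumed to be spack.util.cpus; values illustrative):

    from spack.util.cpus import determine_number_of_jobs

    # Sequential package: always one job.
    assert determine_number_of_jobs(parallel=False) == 1

    # Parallel package: an explicit command-line setting wins;
    # otherwise the config default, capped at max_cpus.
    jobs = determine_number_of_jobs(parallel=True, max_cpus=8)
    assert jobs <= 8
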
@@ -9,7 +9,8 @@
import llnl.util.tty as tty

#: Set of hash algorithms that Spack can use, mapped to digest size in bytes
hashes = {"md5": 16, "sha1": 20, "sha224": 28, "sha256": 32, "sha384": 48, "sha512": 64}
hashes = {"sha256": 32, "md5": 16, "sha1": 20, "sha224": 28, "sha384": 48, "sha512": 64}
# Note: keys are ordered by popularity for earliest return in ``hash_key in version_dict`` checks.


#: size of hash digests in bytes, mapped to algorithm names

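Why key order matters: lookups scan the algorithms in dict order and stop at the first key present in a version entry, so putting the common case first short-circuits earliest. A minimal sketch (version_dict is hypothetical):

    hashes = {"sha256": 32, "md5": 16, "sha1": 20, "sha224": 28, "sha384": 48, "sha512": 64}

    version_dict = {"sha256": "deadbeef", "url": "https://example.com/pkg.tar.gz"}

    # The common case ("sha256") matches on the first iteration.
    hash_key = next((h for h in hashes if h in version_dict), None)
    assert hash_key == "sha256"
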
@@ -432,6 +432,47 @@ def get_rpaths(path):
    return rpath.split(":")


def delete_rpath(path):
    """Modifies a binary to remove the rpath. It zeros out the rpath string
    and also drops the DT_R(UN)PATH entry from the dynamic section, so it doesn't
    show up in 'readelf -d file', nor in 'strings file'."""
    with open(path, "rb+") as f:
        elf = parse_elf(f, interpreter=False, dynamic_section=True)

        if not elf.has_rpath:
            return

        # Zero out the rpath *string* in the binary
        new_rpath_string = b"\x00" * len(elf.dt_rpath_str)
        rpath_offset = elf.pt_dynamic_strtab_offset + elf.rpath_strtab_offset
        f.seek(rpath_offset)
        f.write(new_rpath_string)

        # Next update the dynamic array
        f.seek(elf.pt_dynamic_p_offset)
        dynamic_array_fmt = elf.byte_order + ("qQ" if elf.is_64_bit else "lL")
        dynamic_array_size = calcsize(dynamic_array_fmt)
        new_offset = elf.pt_dynamic_p_offset  # points to the new dynamic array
        old_offset = elf.pt_dynamic_p_offset  # points to the current dynamic array
        for _ in range(elf.pt_dynamic_p_filesz // dynamic_array_size):
            data = read_exactly(f, dynamic_array_size, "Malformed dynamic array entry")
            tag, _ = unpack(dynamic_array_fmt, data)

            # Overwrite any entry that is not DT_RPATH or DT_RUNPATH, including DT_NULL
            if tag != ELF_CONSTANTS.DT_RPATH and tag != ELF_CONSTANTS.DT_RUNPATH:
                if new_offset != old_offset:
                    f.seek(new_offset)
                    f.write(data)
                    f.seek(old_offset + dynamic_array_size)
                new_offset += dynamic_array_size

            # End of the dynamic array
            if tag == ELF_CONSTANTS.DT_NULL:
                break

            old_offset += dynamic_array_size
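The read/write offset walk above is in-place array compaction: entries to keep are shifted left over the DT_RPATH/DT_RUNPATH slots being dropped. The same idea on a plain Python list (illustration only; the real code operates on packed binary dynamic-array entries):

    def compact(entries, drop):
        # Two-pointer, in-place compaction: keep entries for which
        # drop(entry) is False, preserving their relative order.
        write = 0
        for read in range(len(entries)):
            if not drop(entries[read]):
                entries[write] = entries[read]
                write += 1
        return entries[:write]

    assert compact([1, 2, 3, 2, 4], lambda x: x == 2) == [1, 3, 4]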


def replace_rpath_in_place_or_raise(path, substitutions):
    regex = re.compile(b"|".join(re.escape(p) for p in substitutions.keys()))

@@ -47,7 +47,7 @@
    "csh": "setenv {0} {1};\n",
    "fish": "set -gx {0} {1};\n",
    "bat": 'set "{0}={1}"\n',
    "pwsh": "$Env:{0}={1}\n",
    "pwsh": "$Env:{0}='{1}'\n",
}


@@ -56,7 +56,7 @@
    "csh": "unsetenv {0};\n",
    "fish": "set -e {0};\n",
    "bat": 'set "{0}="\n',
    "pwsh": "Remove-Item Env:{0}\n",
    "pwsh": "Set-Item -Path Env:{0}\n",
}

@@ -429,7 +429,7 @@ class RemovePath(NameValueModifier):
    def execute(self, env: MutableMapping[str, str]):
        tty.debug(f"RemovePath: {self.name}-{str(self.value)}", level=3)
        environment_value = env.get(self.name, "")
        directories = environment_value.split(self.separator) if environment_value else []
        directories = environment_value.split(self.separator)
        directories = [
            path_to_os_path(os.path.normpath(x)).pop()
            for x in directories

@@ -724,11 +724,10 @@ def shell_modifications(
            cmds += _SHELL_UNSET_STRINGS[shell].format(name)
        else:
            if sys.platform != "win32":
                cmd = _SHELL_SET_STRINGS[shell].format(
                    name, double_quote_escape(new_env[name])
                )
                new_env_name = double_quote_escape(new_env[name])
            else:
                cmd = _SHELL_SET_STRINGS[shell].format(name, new_env[name])
                new_env_name = new_env[name]
            cmd = _SHELL_SET_STRINGS[shell].format(name, new_env_name)
            cmds += cmd
    return cmds

@@ -11,7 +11,7 @@

__all__ = ["load", "dump", "SpackJSONError"]

_json_dump_args = {"indent": 2, "separators": (",", ": ")}
_json_dump_args = {"indent": None, "separators": (",", ":")}


def load(stream: Any) -> Dict:

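The switch to indent=None with tight separators makes every dump compact single-line JSON. A quick comparison (standard-library behavior, not Spack-specific):

    import json

    data = {"a": [1, 2]}
    pretty = json.dumps(data, indent=2, separators=(",", ": "))
    compact = json.dumps(data, indent=None, separators=(",", ":"))
    assert compact == '{"a":[1,2]}'
    assert len(compact) < len(pretty)
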
@@ -13,31 +13,32 @@
import sys
import time
from contextlib import contextmanager
from typing import Dict
from typing import Callable, Dict, List

from llnl.util.lang import pretty_seconds_formatter

import spack.util.spack_json as sjson

Interval = collections.namedtuple("Interval", ("begin", "end"))
TimerEvent = collections.namedtuple("TimerEvent", ("time", "running", "label"))
TimeTracker = collections.namedtuple("TimeTracker", ("total", "start", "count", "path"))

#: name for the global timer (used in start(), stop(), duration() without arguments)
global_timer_name = "_global"


class BaseTimer:
    def start(self, name=global_timer_name):
    def start(self, name=None):
        pass

    def stop(self, name=global_timer_name):
    def stop(self, name=None):
        pass

    def duration(self, name=global_timer_name):
    def duration(self, name=None):
        return 0.0

    @contextmanager
    def measure(self, name):
        yield
        yield self

    @property
    def phases(self):
@@ -60,16 +61,18 @@ class NullTimer(BaseTimer):
class Timer(BaseTimer):
    """Simple interval timer"""

    def __init__(self, now=time.time):
    def __init__(self, now: Callable[[], float] = time.time):
        """
        Arguments:
            now: function that gives the seconds since e.g. epoch
        """
        self._now = now
        self._timers: Dict[str, Interval] = collections.OrderedDict()
        self._timers: Dict[str, TimeTracker] = {}
        self._timer_stack: List[str] = []

        # _global is the overall timer since the instance was created
        self._timers[global_timer_name] = Interval(self._now(), end=None)
        self._events: List[TimerEvent] = []
        # Push start event
        self._events.append(TimerEvent(self._now(), True, global_timer_name))

    def start(self, name=global_timer_name):
        """
@@ -79,7 +82,7 @@ def start(self, name=global_timer_name):
            name (str): Optional name of the timer. When no name is passed, the
                global timer is started.
        """
        self._timers[name] = Interval(self._now(), None)
        self._events.append(TimerEvent(self._now(), True, name))

    def stop(self, name=global_timer_name):
        """
@@ -90,10 +93,7 @@ def stop(self, name=global_timer_name):
            name (str): Optional name of the timer. When no name is passed, all
                timers are stopped.
        """
        interval = self._timers.get(name, None)
        if not interval:
            return
        self._timers[name] = Interval(interval.begin, self._now())
        self._events.append(TimerEvent(self._now(), False, name))

    def duration(self, name=global_timer_name):
        """
@@ -107,13 +107,13 @@ def duration(self, name=global_timer_name):
        Returns:
            float: duration of timer.
        """
        try:
            interval = self._timers[name]
        except KeyError:
        self._flatten()
        if name in self._timers:
            if name in self._timer_stack:
                return self._timers[name].total + (self._now() - self._timers[name].start)
            return self._timers[name].total
        else:
            return 0.0
        # Take either the interval end, the global timer, or now.
        end = interval.end or self._timers[global_timer_name].end or self._now()
        return end - interval.begin

    @contextmanager
    def measure(self, name):
@@ -123,23 +123,72 @@ def measure(self, name):
        Arguments:
            name (str): Name of the timer
        """
        begin = self._now()
        yield
        self._timers[name] = Interval(begin, self._now())
        self.start(name)
        yield self
        self.stop(name)

    @property
    def phases(self):
        """Get all named timers (excluding the global/total timer)"""
        return [k for k in self._timers.keys() if k != global_timer_name]
        self._flatten()
        return [k for k in self._timers.keys() if not k == global_timer_name]

    def write_json(self, out=sys.stdout):
    def _flatten(self):
        for event in self._events:
            if event.running:
                if event.label not in self._timer_stack:
                    self._timer_stack.append(event.label)
                # Only start the timer if it is on top of the stack;
                # restart doesn't work after a subtimer is started
                if event.label == self._timer_stack[-1]:
                    timer_path = "/".join(self._timer_stack[1:])
                    tracker = self._timers.get(
                        event.label, TimeTracker(0.0, event.time, 0, timer_path)
                    )
                    assert tracker.path == timer_path
                    self._timers[event.label] = TimeTracker(
                        tracker.total, event.time, tracker.count, tracker.path
                    )
            else:  # if not event.running:
                if event.label in self._timer_stack:
                    index = self._timer_stack.index(event.label)
                    for label in self._timer_stack[index:]:
                        tracker = self._timers[label]
                        self._timers[label] = TimeTracker(
                            tracker.total + (event.time - tracker.start),
                            None,
                            tracker.count + 1,
                            tracker.path,
                        )
                    self._timer_stack = self._timer_stack[: max(0, index)]
        # clear events
        self._events = []

    def write_json(self, out=sys.stdout, extra_attributes={}):
        """Write a json object with times to file"""
        phases = [{"name": p, "seconds": self.duration(p)} for p in self.phases]
        times = {"phases": phases, "total": {"seconds": self.duration()}}
        out.write(sjson.dump(times))
        self._flatten()
        data = {
            "total": self._timers[global_timer_name].total,
            "phases": [
                {
                    "name": phase,
                    "path": self._timers[phase].path,
                    "seconds": self._timers[phase].total,
                    "count": self._timers[phase].count,
                }
                for phase in self.phases
            ],
        }
        if extra_attributes:
            data.update(extra_attributes)
        if out:
            out.write(sjson.dump(data))
        else:
            return data

    def write_tty(self, out=sys.stdout):
        """Write a human-readable summary of timings"""
        """Write a human-readable summary of timings (depth is 1)"""
        self._flatten()

        times = [self.duration(p) for p in self.phases]

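A hedged usage sketch of the event-based timer (module path assumed to be spack.util.timer; nesting produces slash-separated paths):

    import io

    from spack.util.timer import Timer

    t = Timer()
    with t.measure("concretize"):
        with t.measure("solve"):  # nested phase: path becomes "concretize/solve"
            pass

    t.stop()  # close the global timer
    buf = io.StringIO()
    t.write_json(buf)  # {"total": ..., "phases": [{"name", "path", "seconds", "count"}, ...]}
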
@@ -52,6 +52,20 @@ if test -n "${ZSH_VERSION:-}" ; then
    fi
fi

# compgen -W doesn't work in some versions of zsh, so use this instead.
# see https://www.zsh.org/mla/workers/2011/msg00582.html
_compgen_w() {
    if test -n "${ZSH_VERSION:-}" ; then
        typeset -a words
        words=( ${~=1} )
        local find="$2"
        results=(${(M)words[@]:#$find*})
        echo "${results[@]}"
    else
        compgen -W "$1" -- "$2"
    fi
}
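Both branches compute the same thing: the subset of candidate words that start with the typed prefix. The equivalent logic as a Python model (illustrative, not part of the script):

    def compgen_w(words: str, prefix: str) -> list:
        # Mirror of `compgen -W "$words" -- "$prefix"`: split the word
        # list and keep the entries starting with the prefix.
        return [w for w in words.split() if w.startswith(prefix)]

    assert compgen_w("install info list", "in") == ["install", "info"]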

# Bash programmable completion for Spack
_bash_completion_spack() {
    # In all following examples, let the cursor be denoted by brackets, i.e. []
@@ -137,8 +151,11 @@ _bash_completion_spack() {
        if [[ "$(LC_ALL=C type $subfunction 2>&1)" =~ $rgx ]]
        then
            $subfunction
            COMPREPLY=($(compgen -W "$SPACK_COMPREPLY" -- "$cur"))
            COMPREPLY=($(_compgen_w "$SPACK_COMPREPLY" "$cur"))
        fi

    # if every completion is an alias for the same thing, just return that thing.
    _spack_compress_aliases
}

# Helper functions for subcommands
@@ -328,6 +345,51 @@ _spacktivate() {
    _spack_env_activate
}

# Simple function to get the spack alias for a command
_spack_get_alias() {
    local possible_alias="${1-}"
    local IFS=";"

    # spack aliases are a ;-separated list of :-separated pairs
    for item in $SPACK_ALIASES; do
        # maps a possible alias to its command
        eval "local real_command=\"\${item#*${possible_alias}:}\""
        if [ "$real_command" != "$item" ]; then
            SPACK_ALIAS="$real_command"
            return
        fi
    done

    # no alias found -- just return $1
    SPACK_ALIAS="$possible_alias"
}

# If all commands in COMPREPLY alias to the same thing, set COMPREPLY to
# just the real command, not the aliases.
_spack_compress_aliases() {
    # If there are zero or one completions, don't do anything.
    # If this isn't the first argument, bail because aliases currently only apply
    # to top-level commands.
    if [ "${#COMPREPLY[@]}" -le "1" ] || [ "$COMP_CWORD" != "1" ]; then
        return
    fi

    # get the alias of the first thing in the list of completions
    _spack_get_alias "${COMPREPLY[@]:0:1}"
    local first_alias="$SPACK_ALIAS"

    # if anything in the list would alias to something different, stop
    for comp in "${COMPREPLY[@]:1}"; do
        _spack_get_alias "$comp"
        if [ "$SPACK_ALIAS" != "$first_alias" ]; then
            return
        fi
    done

    # all commands alias to first alias; just return that
    COMPREPLY=("$first_alias")
}
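The compression step collapses the completion list whenever every entry maps to the same canonical command. A Python model of that logic (alias table taken from the SPACK_ALIASES value later in the diff):

    ALIASES = {"concretise": "concretize", "containerise": "containerize", "rm": "remove"}

    def compress_aliases(completions):
        if len(completions) <= 1:
            return completions
        canonical = {ALIASES.get(c, c) for c in completions}
        # Everything aliases to one command: offer just that command.
        return [canonical.pop()] if len(canonical) == 1 else completions

    assert compress_aliases(["concretize", "concretise"]) == ["concretize"]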

# Spack commands
#
# Everything below here is auto-generated.

@@ -1,5 +1,9 @@
stages: [ "generate", "build", "publish" ]

variables:
  SPACK_DISABLE_LOCAL_CONFIG: "1"
  SPACK_USER_CACHE_PATH: "${CI_PROJECT_DIR}/tmp/_user_cache/"

default:
  image: { "name": "ghcr.io/spack/e4s-ubuntu-18.04:v2021-10-18", "entrypoint": [""] }

@@ -65,8 +69,6 @@ default:
.base-job:
  variables:
    SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}"
    SPACK_DISABLE_LOCAL_CONFIG: "1"
    SPACK_USER_CACHE_PATH: "${CI_PROJECT_DIR}/tmp/_user_cache/"
  rules:
    - if: $CI_COMMIT_REF_NAME == "develop"
      # Pipelines on develop only rebuild what is missing from the mirror
@@ -136,6 +138,9 @@ default:
  variables:
    KUBERNETES_CPU_REQUEST: 4000m
    KUBERNETES_MEMORY_REQUEST: 16G
    # avoid moving targets like branches and tags
    SPACK_CONCRETIZER_REQUIRE_CHECKSUM: 1
    SPACK_BACKTRACE: 1
  interruptible: true
  timeout: 60 minutes
  retry:

@@ -20,7 +20,11 @@ ci:
    - k=$CI_GPG_KEY_ROOT/intermediate_ci_signing_key.gpg; [[ -r $k ]] && spack gpg trust $k
    - k=$CI_GPG_KEY_ROOT/spack_public_key.gpg; [[ -r $k ]] && spack gpg trust $k
    script::
    - spack --color=always --backtrace ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
    - - spack --color=always --backtrace ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
    - - spack python ${CI_PROJECT_DIR}/share/spack/gitlab/cloud_pipelines/scripts/common/aggregate_package_logs.spack.py
        --prefix /home/software/spack:${CI_PROJECT_DIR}
        --log install_times.json
        ${SPACK_ARTIFACTS_ROOT}/user_data/install_times.json
    after_script:
    - - cat /proc/loadavg || true
    variables:

@@ -1,11 +1,14 @@
packages: {}

# CI should never build develop/main/master versions of packages. Current issues:
# - e4s/dav
# - hdf5-vol-async => argobot@main
# - aws-isc-aarch64
# - sse2neon

# packages:
#   all:
#     require: "@:999999999"
packages:
  #all:
    # CI should never build develop/main/master versions of
    # packages.
    # Current issues:
    # - e4s/dav
    # - hdf5-vol-async => argobot@main
    # - aws-isc-aarch64
    # - sse2neon
    #require: "@:999999999"
  python:
    # This is required after https://github.com/spack/spack/pull/38619
    # to avoid concretizing deprecated python.
    require: "@3.8:"

@@ -0,0 +1,38 @@
#!/usr/bin/env spack-python
"""
This script is meant to be run using:
`spack python aggregate_logs.spack.py`
"""

import os


def find_logs(prefix, filename):
    for root, _, files in os.walk(prefix):
        if filename in files:
            yield os.path.join(root, filename)


if __name__ == "__main__":
    import json
    from argparse import ArgumentParser

    parser = ArgumentParser("aggregate_logs")
    parser.add_argument("output_file")
    parser.add_argument("--log", default="install_times.json")
    parser.add_argument("--prefix", required=True)

    args = parser.parse_args()

    prefixes = [p for p in args.prefix.split(":") if os.path.exists(p)]

    # Aggregate the install timers into a single json
    data = []
    for prefix in prefixes:
        time_logs = find_logs(prefix, args.log)
        for log in time_logs:
            with open(log) as fd:
                data.append(json.load(fd))

    with open(args.output_file, "w") as fd:
        json.dump(data, fd)
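For example, an invocation like `spack python aggregate_logs.spack.py all_times.json --prefix /home/software/spack:$CI_PROJECT_DIR --log install_times.json` (arguments illustrative) would collect every install_times.json found under the listed prefixes into a single all_times.json list, as the __main__ block above does in CI.
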
@@ -3,51 +3,50 @@ spack:
  packages:
    cmake:
      variants: ~ownlibs
    ecp-data-vis-sdk:
      require:
      - one_of:
        - +ascent +adios2 +cinema +darshan +faodel +hdf5 +pnetcdf +sensei +sz +unifyfs
          +veloc +vtkm +zfp
      - one_of:
        - +paraview ~visit
        - ~paraview +visit
    hdf5:
      require:
      - one_of: ['@1.14', '@1.12']
    mesa:
      require: "+glx +osmesa +opengl ~opengles +llvm"
      require: +glx +osmesa +opengl ~opengles +llvm
    libosmesa:
      require: "mesa +osmesa"
      require: mesa +osmesa
    libglx:
      require: "mesa +glx"
      require: mesa +glx
    ospray:
      require: "@2.8.0 +denoiser +mpi"
      require: '@2.8.0 +denoiser +mpi'
    llvm:
      require: "@14:"
      require: '@14:'
      # Minimize LLVM
      variants: "~lldb~lld~polly~gold libunwind=none compiler-rt=none"
      variants: ~lldb~lld~polly~gold libunwind=none compiler-rt=none
    all:
      require: target=x86_64_v3

  definitions:
  - paraview_specs:
    - matrix:
      - - paraview
      - - +qt~osmesa ^glew gl=glx  # GUI Support w/ GLX Rendering
        - ~qt~osmesa ^glew gl=glx  # GLX Rendering
        - ~qt+osmesa ^glew gl=osmesa  # OSMesa Rendering
      - - paraview +raytracing
      - - +qt~osmesa  # GUI Support w/ GLX Rendering
        - ~qt~osmesa  # GLX Rendering
        - ~qt+osmesa  # OSMesa Rendering
  - visit_specs:
    - matrix:
      - - visit
      - - +gui~osmesa  # GUI Support w/ GLX Rendering
        - ~gui~osmesa  # GLX Rendering
      - - ~gui~osmesa  # GLX Rendering
        - ~gui+osmesa  # OSMesa Rendering
        # VisIt GUI does not work with Qt 5.14.2
        # - +gui~osmesa  # GUI Support w/ GLX Rendering
  - sdk_base_spec:
    - matrix:
      - - ecp-data-vis-sdk
          +ascent
          +adios2
          +cinema
          +darshan
          +faodel
          +hdf5
          +paraview
          +pnetcdf
          +sensei
          +sz
          +unifyfs
          +veloc
          +vtkm
          +zfp
      - - ecp-data-vis-sdk +ascent +adios2 +cinema +darshan +faodel +hdf5 +pnetcdf
          +sensei +sz +unifyfs +veloc +vtkm +zfp
      - - ~cuda ~rocm
        # Current testing of GPU supported configurations
        # is provided in the E4S stack
@@ -55,22 +54,20 @@ spack:
        # - ~cuda +rocm

  specs:
  # Test ParaView builds with different GL backends
  - matrix:
    - [$sdk_base_spec]
    - [$^paraview_specs]
    - - ^hdf5@1.14  # Non-VisIt can build HDF5 1.14
  # Test ParaView builds with different GL backends
  # - matrix:
  #   - [$sdk_base_spec]
  #   - [$^visit_specs]
  # Test ParaView and VisIt builds with different GL backends
  - matrix:
    - [$sdk_base_spec]
    - [$^paraview_specs]
  - matrix:
    - [$sdk_base_spec]
    - [$^visit_specs]

  mirrors: { "mirror": "s3://spack-binaries/develop/data-vis-sdk" }
  mirrors: {mirror: s3://spack-binaries/develop/data-vis-sdk}

  ci:
    pipeline-gen:
    - build-job:
        image: { "name": "ecpe4s/ubuntu20.04-runner-x86_64:2023-01-01", "entrypoint": [""] }
        image: {name: ecpe4s/ubuntu20.04-runner-x86_64:2023-01-01, entrypoint: ['']}

  cdash:
    build-group:: Data and Vis SDK
    'build-group:': Data and Vis SDK

@@ -224,7 +224,7 @@ spack:

  # GPU
  - aml +ze
  - amrex +sycl dimensions=3
  - amrex +sycl
  - arborx +sycl ^kokkos +sycl +openmp cxxstd=17 +tests +examples
  - cabana +sycl ^kokkos +sycl +openmp cxxstd=17 +tests +examples
  - kokkos +sycl +openmp cxxstd=17 +tests +examples

@@ -29,8 +29,8 @@ spack:
  - py-keras-preprocessing
  - py-keras2onnx

  # MXNet
  - mxnet
  # MXNet not supported on darwin aarch64 yet
  # - mxnet

  # PyTorch
  - py-botorch

@@ -61,6 +61,15 @@ contains 'python' _spack_completions spack extensions ''
contains 'hdf5' _spack_completions spack -d install --jobs 8 ''
contains 'hdf5' _spack_completions spack install -v ''

title 'Testing alias handling'
contains 'concretize' _spack_completions spack c
contains 'concretise' _spack_completions spack c
contains 'concretize' _spack_completions spack conc
does_not_contain 'concretise' _spack_completions spack conc

does_not_contain 'concretize' _spack_completions spack isnotacommand
does_not_contain 'concretize' _spack_completions spack env isnotacommand

# XFAIL: Fails for Python 2.6 because pkg_resources not found?
#contains 'compilers.py' _spack_completions spack unit-test ''

@@ -52,6 +52,20 @@ if test -n "${ZSH_VERSION:-}" ; then
    fi
fi

# compgen -W doesn't work in some versions of zsh, so use this instead.
# see https://www.zsh.org/mla/workers/2011/msg00582.html
_compgen_w() {
    if test -n "${ZSH_VERSION:-}" ; then
        typeset -a words
        words=( ${~=1} )
        local find="$2"
        results=(${(M)words[@]:#$find*})
        echo "${results[@]}"
    else
        compgen -W "$1" -- "$2"
    fi
}

# Bash programmable completion for Spack
_bash_completion_spack() {
    # In all following examples, let the cursor be denoted by brackets, i.e. []
@@ -137,8 +151,11 @@ _bash_completion_spack() {
        if [[ "$(LC_ALL=C type $subfunction 2>&1)" =~ $rgx ]]
        then
            $subfunction
            COMPREPLY=($(compgen -W "$SPACK_COMPREPLY" -- "$cur"))
            COMPREPLY=($(_compgen_w "$SPACK_COMPREPLY" "$cur"))
        fi

    # if every completion is an alias for the same thing, just return that thing.
    _spack_compress_aliases
}

# Helper functions for subcommands
@@ -328,16 +345,63 @@ _spacktivate() {
    _spack_env_activate
}

# Simple function to get the spack alias for a command
_spack_get_alias() {
    local possible_alias="${1-}"
    local IFS=";"

    # spack aliases are a ;-separated list of :-separated pairs
    for item in $SPACK_ALIASES; do
        # maps a possible alias to its command
        eval "local real_command=\"\${item#*${possible_alias}:}\""
        if [ "$real_command" != "$item" ]; then
            SPACK_ALIAS="$real_command"
            return
        fi
    done

    # no alias found -- just return $1
    SPACK_ALIAS="$possible_alias"
}

# If all commands in COMPREPLY alias to the same thing, set COMPREPLY to
# just the real command, not the aliases.
_spack_compress_aliases() {
    # If there are zero or one completions, don't do anything.
    # If this isn't the first argument, bail because aliases currently only apply
    # to top-level commands.
    if [ "${#COMPREPLY[@]}" -le "1" ] || [ "$COMP_CWORD" != "1" ]; then
        return
    fi

    # get the alias of the first thing in the list of completions
    _spack_get_alias "${COMPREPLY[@]:0:1}"
    local first_alias="$SPACK_ALIAS"

    # if anything in the list would alias to something different, stop
    for comp in "${COMPREPLY[@]:1}"; do
        _spack_get_alias "$comp"
        if [ "$SPACK_ALIAS" != "$first_alias" ]; then
            return
        fi
    done

    # all commands alias to first alias; just return that
    COMPREPLY=("$first_alias")
}

# Spack commands
#
# Everything below here is auto-generated.
SPACK_ALIASES="concretise:concretize;containerise:containerize;rm:remove"


_spack() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -H --all-help --color -c --config -C --config-scope -d --debug --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -b --bootstrap -p --profile --sorted-profile --lines -v --verbose --stacktrace --backtrace -V --version --print-shell-vars"
    else
        SPACK_COMPREPLY="add arch audit blame bootstrap build-env buildcache cd change checksum ci clean clone commands compiler compilers concretize config containerize create debug dependencies dependents deprecate dev-build develop diff docs edit env extensions external fetch find gc gpg graph help info install license list load location log-parse maintainers make-installer mark mirror module patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style tags test test-env tutorial undevelop uninstall unit-test unload url verify versions view"
        SPACK_COMPREPLY="add arch audit blame bootstrap build-env buildcache cd change checksum ci clean clone commands compiler compilers concretize concretise config containerize containerise create debug dependencies dependents deprecate dev-build develop diff docs edit env extensions external fetch find gc gpg graph help info install license list load location log-parse maintainers make-installer mark mirror module patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style tags test test-env tutorial undevelop uninstall unit-test unload url verify versions view"
    fi
}

@@ -498,7 +562,7 @@ _spack_buildcache() {
_spack_buildcache_push() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -f --force --allow-root -a --unsigned -u --key -k --update-index --rebuild-index --spec-file --only"
        SPACK_COMPREPLY="-h --help -f --force --allow-root -a --unsigned -u --key -k --update-index --rebuild-index --spec-file --only --fail-fast"
    else
        _mirrors
    fi
@@ -507,7 +571,7 @@ _spack_buildcache_push() {
_spack_buildcache_create() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -f --force --allow-root -a --unsigned -u --key -k --update-index --rebuild-index --spec-file --only"
        SPACK_COMPREPLY="-h --help -f --force --allow-root -a --unsigned -u --key -k --update-index --rebuild-index --spec-file --only --fail-fast"
    else
        _mirrors
    fi
@@ -737,6 +801,10 @@ _spack_concretize() {
    SPACK_COMPREPLY="-h --help -f --force --test -q --quiet -U --fresh --reuse --reuse-deps -j --jobs"
}

_spack_concretise() {
    SPACK_COMPREPLY="-h --help -f --force --test -q --quiet -U --fresh --reuse --reuse-deps -j --jobs"
}

_spack_config() {
    if $list_options
    then
@@ -830,6 +898,10 @@ _spack_containerize() {
    SPACK_COMPREPLY="-h --help --list-os --last-stage"
}

_spack_containerise() {
    SPACK_COMPREPLY="-h --help --list-os --last-stage"
}

_spack_create() {
    if $list_options
    then
@@ -942,7 +1014,7 @@ _spack_env_activate() {
}

_spack_env_deactivate() {
    SPACK_COMPREPLY="-h --help --sh --csh --fish --bat"
    SPACK_COMPREPLY="-h --help --sh --csh --fish --bat --pwsh"
}

_spack_env_create() {
@@ -1049,7 +1121,7 @@ _spack_external() {
_spack_external_find() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help --not-buildable --exclude -p --path --scope --all -t --tag"
        SPACK_COMPREPLY="-h --help --not-buildable --exclude -p --path --scope --all -t --tag -j --jobs"
    else
        _all_packages
    fi
@@ -1234,7 +1306,7 @@ _spack_list() {
_spack_load() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help --sh --csh --fish --bat --first --only --list"
        SPACK_COMPREPLY="-h --help --sh --csh --fish --bat --pwsh --first --only --list"
    else
        _installed_packages
    fi
@@ -1852,7 +1924,7 @@ _spack_unit_test() {
_spack_unload() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help --sh --csh --fish --bat -a --all"
        SPACK_COMPREPLY="-h --help --sh --csh --fish --bat --pwsh -a --all"
    else
        _installed_packages
    fi