Compare commits

develop-20...develop-20

339 commits
| SHA1 |
|---|
| 783bbdf2db |
| a65f13f79f |
| fc391d5332 |
| e05f12f18e |
| 14f248652c |
| 87f99de3fb |
| 9e7fe04a77 |
| 45d149c7d3 |
| 8250a085b0 |
| 39b9f214a8 |
| 7631b5ea14 |
| a4d2f8332f |
| 007f02e06a |
| 8ec1657136 |
| c5fc794d77 |
| 5c409f794a |
| 06b30842e4 |
| ebbe63013d |
| 3f7f10ca2b |
| 6a5a074150 |
| c046c61cff |
| 7bd95f6ad3 |
| 4429e17db0 |
| 65dd6e1196 |
| a43da48d23 |
| f9c06669ca |
| 11c6431c9a |
| 1e85a1b227 |
| b81aa42179 |
| c59f68a33d |
| 743a93902d |
| 5bc5139552 |
| 3be450c16f |
| c733fe9c34 |
| e2edb45d2c |
| b2a95fb4b7 |
| 7bf7a266ba |
| 2341074694 |
| 1c0dbab821 |
| 865c8b606c |
| c98afbc44c |
| 57cd822fb7 |
| 627c2d3bf6 |
| 3b1b261cd8 |
| 40c4c81c19 |
| 642451e047 |
| 6630ddb47d |
| 7fd7d0b9fd |
| f7d71ec792 |
| d80bc70481 |
| 81cfe39ae3 |
| ed058fd212 |
| 1da12490fa |
| 8b5b4ade0e |
| 12bc4cf093 |
| f8676db7f4 |
| dd747c5c48 |
| cf031e83f0 |
| f709518916 |
| aa9eb33108 |
| 818c9aeb5a |
| cfdf19ed6b |
| 566754440f |
| f0658243c0 |
| 06b6b05dbd |
| 189cd59d13 |
| 5a43f4ba55 |
| 29aa7117f4 |
| d367b4285a |
| 260e735425 |
| ca872f9c34 |
| b72a268bc5 |
| 818195a3bd |
| 679d41ea66 |
| 86216cc36e |
| ecb7ad493f |
| fb1e81657c |
| 34e4c62e8c |
| acb02326aa |
| c1756257c2 |
| 1ee7c735ec |
| 22deed708e |
| 6693dc5eb8 |
| 396f219011 |
| a3ecd7efed |
| ae5511afd6 |
| 78fe2c63fa |
| f4f396745e |
| f3c6d892b1 |
| 2f5988cec7 |
| 44922f734d |
| 144e657c58 |
| 6f48fe2b6f |
| fcd03adc02 |
| 0620b954be |
| 6174b829f7 |
| 0d4b1c6a73 |
| fb9797bd67 |
| 4eee3c12c1 |
| 3e5f9a2138 |
| 8295a45999 |
| 5138c71d34 |
| eef9939c21 |
| ffddaabaa0 |
| f664d1edaa |
| 6d5d1562bd |
| 70c71e8f93 |
| d9d1eb24f9 |
| cef59ad0bf |
| a1e117a98b |
| cb855d5ffd |
| 3866ff0096 |
| 6dc167e43d |
| 0fd085be8e |
| 74fba221f1 |
| deeeb86067 |
| 98daf5b7ec |
| 8a3d98b632 |
| 0cc945b367 |
| e732155e8c |
| c07fb833a9 |
| 7d566b481f |
| a72e5e762e |
| 0eb22ef770 |
| 95f78440f1 |
| 74a51aba50 |
| b370ecfbda |
| 04d55b7600 |
| d695438851 |
| f0447d63ad |
| e8a7a04f14 |
| 23316f0352 |
| b3433cb872 |
| 349ba83bc6 |
| ecfd9ef12b |
| 4502351659 |
| 8a08f09ac0 |
| 60ecd0374e |
| 52ccee79d8 |
| 7f0f1b63d6 |
| b65f1f22ec |
| e9efa1df75 |
| 884a5b8b07 |
| 91d674f5d0 |
| 76fbb8cd8f |
| 0f3f2a8024 |
| 5a5f774369 |
| f5212ae139 |
| 4b618704bf |
| 46285d9725 |
| 36852fe348 |
| 8914d26867 |
| fdea5e7624 |
| ca1e4d54b5 |
| 656528bbbb |
| 4d42e9d1f3 |
| d058c1d649 |
| 43854fc2ec |
| 6a2149df6e |
| af38d097ac |
| e67dca73d1 |
| 2e6ed1e707 |
| 53d2ffaf83 |
| a95e061fed |
| e01b9b38ef |
| eac15badd3 |
| 806b8aa966 |
| 9e5ca525f7 |
| 5ea4322f88 |
| 4ca2d8bc19 |
| e0059ef961 |
| 7d9fad9576 |
| 553277a84f |
| 00a3ebd0bb |
| ffc9060e11 |
| 31d5f56913 |
| bfdebae831 |
| aa83fa44e1 |
| e56291dd45 |
| 2f52545214 |
| 5090023e3a |
| d355880110 |
| 1a0434b808 |
| c3eec8a36f |
| 25b8cf93d2 |
| 34ff7605e6 |
| e026fd3613 |
| 3f5f4cfe26 |
| 74fe9ccef3 |
| fd5a8b2075 |
| 33793445cf |
| f4a144c8ac |
| 6c439ec022 |
| 209409189a |
| ff900566e0 |
| a954a0bb9f |
| c21e00f504 |
| 9ae1317e79 |
| 9f1a30d3b5 |
| 1340995249 |
| afebc11742 |
| 34e9fc612c |
| 1d8ff7f742 |
| 0e27f05611 |
| 19aaa97ff2 |
| 990309355f |
| 2cb66e6e44 |
| cfaade098a |
| ed65532e27 |
| 696d4a1b85 |
| 8def75b414 |
| 5389db821d |
| 0d5ae3a809 |
| b61ad8d2a8 |
| b35db020eb |
| ca1d15101e |
| c9ec5fb9ac |
| 71abb8c7f0 |
| 4dafae8d17 |
| b2b00df5cc |
| 114e5d4767 |
| fd70e7fb31 |
| 77760c8ea4 |
| 737a6dcc73 |
| 3826fe3765 |
| edb11941b2 |
| 1bd58a8026 |
| f8e0c8caed |
| d0412c1578 |
| ec500adb50 |
| 30f5c74614 |
| 713eb210ac |
| a022e45866 |
| 82685a68d9 |
| b19691d503 |
| 54ea860b37 |
| fb598baa53 |
| 02763e967a |
| 2846be315b |
| 4818b75814 |
| b613bf3855 |
| 3347372a7b |
| c417a77a19 |
| 90d0d0176c |
| 72b9f89504 |
| a89f1b1bf4 |
| c6e26251a1 |
| 190a1bf523 |
| e381e166ec |
| 2f145b2684 |
| 4c7748e954 |
| 86485dea14 |
| 00f8f5898a |
| f41d7a89f3 |
| 4f07205c63 |
| 08f9c7670e |
| b451791336 |
| 47f176d635 |
| b6ae751657 |
| 9bb5cffc73 |
| 135b44ca59 |
| d3aca68e8f |
| fb83f8ef31 |
| f69c18a922 |
| b95a9d2e47 |
| def4d19980 |
| 1db91e0ccd |
| 34ebe7f53c |
| d07d5410f3 |
| 1db73eb1f2 |
| 2da34de519 |
| d237430f47 |
| 3f0adae9ef |
| 3b4d7bf119 |
| b3087b32c6 |
| ad9c90cb2e |
| 1b0e113a9d |
| 6df5738482 |
| 927d831612 |
| 3f3c75e56a |
| 9733bb3da8 |
| 1de5117ef1 |
| cf8f44ae5a |
| 006e69265e |
| eaec3062a1 |
| d5eb5106b0 |
| 9f8edbf6bf |
| a4301badef |
| 4565811556 |
| b94d54e4d9 |
| a410b22098 |
| c1a73878ea |
| ae553051c8 |
| b94e22b284 |
| e25dcf73cd |
| b7cc4bd247 |
| 22c95923e3 |
| c050b99a06 |
| 60f82685ae |
| 27ab53b68a |
| 907a80ca71 |
| a53cc93016 |
| 6ad0dc3722 |
| 87d4bdaa02 |
| 36394aab2f |
| 358947fc03 |
| 477a3c0ef6 |
| c6c5e11353 |
| 29e2997bd5 |
| 41bd6a75d5 |
| 0976ad3184 |
| fc1d9ba550 |
| 61f0088a27 |
| c202a045e6 |
| 843e1e80f0 |
| 643c028308 |
| d823037c40 |
| 4d945be955 |
| a4ac3f2767 |
| 6e31676b29 |
| 1fff0241f2 |
| a2a52dfb21 |
| f0ed159a1b |
| 9bf7fa0067 |
| fbaea0336e |
| 1673d3e322 |
| c7cca3aa8d |
| da46b63a34 |
| c882214273 |
| 2bacab0402 |
| 0681d9a157 |
| 887847610e |
| 282a01ef76 |
| 151c551781 |
| abbd1abc1a |
| 49c505cc14 |
| 237a56a305 |
| 7e7e6c2797 |
| e67c61aac0 |
**.github/workflows/audit.yaml** (vendored, 2 changes)

```diff
@@ -22,7 +22,7 @@ jobs:
       matrix:
         operating_system: ["ubuntu-latest", "macos-latest"]
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
      with:
        python-version: ${{inputs.python_version}}
```
**.github/workflows/bootstrap.yml** (vendored, 22 changes)

```diff
@@ -24,7 +24,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison bison-devel libstdc++-static
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           cmake bison
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
           bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
           make patch unzip xz-utils python3 python3-dev tree
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      with:
        fetch-depth: 0
    - name: Setup repo
@@ -158,7 +158,7 @@ jobs:
       run: |
         brew install cmake bison@2.7 tree
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
    - name: Bootstrap clingo
      run: |
        source share/spack/setup-env.sh
@@ -179,7 +179,7 @@ jobs:
       run: |
         brew install tree
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
    - name: Bootstrap clingo
      run: |
        set -ex
@@ -204,7 +204,7 @@ jobs:
     runs-on: ubuntu-20.04
     steps:
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      with:
        fetch-depth: 0
    - name: Setup repo
@@ -247,7 +247,7 @@ jobs:
           bzip2 curl file g++ gcc patchelf gfortran git gzip \
           make patch unzip xz-utils python3 python3-dev tree
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           gawk
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
         # Remove GnuPG since we want to bootstrap it
         sudo rm -rf /usr/local/bin/gpg
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
    - name: Bootstrap GnuPG
      run: |
        source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
         # Remove GnuPG since we want to bootstrap it
         sudo rm -rf /usr/local/bin/gpg
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
    - name: Bootstrap GnuPG
      run: |
        source share/spack/setup-env.sh
```
**.github/workflows/build-containers.yml** (vendored, 6 changes)

```diff
@@ -56,7 +56,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2

      - name: Set Container Tag Normal (Nightly)
        run: |
@@ -86,7 +86,7 @@ jobs:
           fi

       - name: Upload Dockerfile
-        uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
+        uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
        with:
          name: dockerfiles
          path: dockerfiles
@@ -95,7 +95,7 @@ jobs:
         uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # @v1

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@4c0219f9ac95b02789c1075625400b2acbff50b1 # @v1
+        uses: docker/setup-buildx-action@885d1462b80bc1c1c7f0b00334ad271f09369c55 # @v1

      - name: Log in to GitHub Container Registry
        uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
```
**.github/workflows/ci.yaml** (vendored, 2 changes)

```diff
@@ -35,7 +35,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
      if: ${{ github.event_name == 'push' }}
      with:
        fetch-depth: 0
```
**.github/workflows/nightly-win-builds.yml** (vendored, 2 changes)

```diff
@@ -14,7 +14,7 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+    - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      with:
        fetch-depth: 0
    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
```
**.github/workflows/unit_tests.yaml** (vendored, 10 changes)

```diff
@@ -47,7 +47,7 @@ jobs:
           on_develop: false

     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -94,7 +94,7 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -133,7 +133,7 @@ jobs:
         dnf install -y \
           bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
           make patch tcl unzip which xz
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
@@ -152,7 +152,7 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -187,7 +187,7 @@ jobs:
       matrix:
         python-version: ["3.10"]
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
```
**.github/workflows/valid-style.yml** (vendored, 6 changes)

```diff
@@ -18,7 +18,7 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
      with:
        python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -68,7 +68,7 @@ jobs:
         dnf install -y \
           bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
           make patch tcl unzip which xz
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
```
**.github/workflows/windows_python.yml** (vendored, 9 changes)

```diff
@@ -15,7 +15,7 @@ jobs:
   unit-tests:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+    - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      with:
        fetch-depth: 0
    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -39,7 +39,7 @@ jobs:
   unit-tests-cmd:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+    - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      with:
        fetch-depth: 0
    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -63,7 +63,7 @@ jobs:
   build-abseil:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+    - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      with:
        fetch-depth: 0
    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -75,6 +75,5 @@
     - name: Build Test
       run: |
         spack compiler find
-        spack external find cmake
-        spack external find ninja
+        spack -d external find cmake ninja
        spack -d install abseil-cpp
```
**SECURITY.md** (32 changes)

```diff
@@ -2,24 +2,26 @@

 ## Supported Versions

-We provide security updates for the following releases.
+We provide security updates for `develop` and for the last two
+stable (`0.x`) release series of Spack. Security updates will be
+made available as patch (`0.x.1`, `0.x.2`, etc.) releases.

 For more on Spack's release structure, see
 [`README.md`](https://github.com/spack/spack#releases).

-
-| Version | Supported |
-| ------- | ------------------ |
-| develop | :white_check_mark: |
-| 0.19.x | :white_check_mark: |
-| 0.18.x | :white_check_mark: |
-
 ## Reporting a Vulnerability

-To report a vulnerability or other security
-issue, email maintainers@spack.io.
-
-You can expect to hear back within two days.
-If your security issue is accepted, we will do
-our best to release a fix within a week. If
-fixing the issue will take longer than this,
-we will discuss timeline options with you.
+You can report a vulnerability using GitHub's private reporting
+feature:
+
+1. Go to [github.com/spack/spack/security](https://github.com/spack/spack/security).
+2. Click "Report a vulnerability" in the upper right corner of that page.
+3. Fill out the form and submit your draft security advisory.
+
+More details are available in
+[GitHub's docs](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability).
+
+You can expect to hear back about security issues within two days.
+If your security issue is accepted, we will do our best to release
+a fix within a week. If fixing the issue will take longer than
+this, we will discuss timeline options with you.
```
```diff
@@ -14,7 +14,7 @@
 ::
 @echo off

-set spack=%SPACK_ROOT%\bin\spack
+set spack="%SPACK_ROOT%"\bin\spack

 ::#######################################################################
 :: This is a wrapper around the spack command that forwards calls to
@@ -51,65 +51,43 @@ setlocal enabledelayedexpansion
 :: subcommands will never start with '-'
 :: everything after the subcommand is an arg

-:: we cannot allow batch "for" loop to directly process CL args
-:: a number of batch reserved characters are commonly passed to
-:: spack and allowing batch's "for" method to process the raw inputs
-:: results in a large number of formatting issues
-:: instead, treat the entire CLI as one string
-:: and split by space manually
-:: capture cl args in variable named cl_args
-set cl_args=%*
 :process_cl_args
-rem tokens=1* returns the first processed token produced
-rem by tokenizing the input string cl_args on spaces into
-rem the named variable %%g
-rem While this make look like a for loop, it only
-rem executes a single time for each of the cl args
-rem the actual iterative loop is performed by the
-rem goto process_cl_args stanza
-rem we are simply leveraging the "for" method's string
-rem tokenization
-for /f "tokens=1*" %%g in ("%cl_args%") do (
-    set t=%%~g
-    rem remainder of string is composed into %%h
-    rem these are the cl args yet to be processed
-    rem assign cl_args var to only the args to be processed
-    rem effectively discarding the current arg %%g
-    rem this will be nul when we have no further tokens to process
-    set cl_args=%%h
-    rem process the first space delineated cl arg
-    rem of this iteration
-    if "!t:~0,1!" == "-" (
-        if defined _sp_subcommand (
-            rem We already have a subcommand, processing args now
-            if not defined _sp_args (
-                set "_sp_args=!t!"
-            ) else (
-                set "_sp_args=!_sp_args! !t!"
-            )
-        ) else (
-            if not defined _sp_flags (
-                set "_sp_flags=!t!"
-                shift
-            ) else (
-                set "_sp_flags=!_sp_flags! !t!"
-                shift
-            )
-        )
-    ) else if not defined _sp_subcommand (
-        set "_sp_subcommand=!t!"
-        shift
-    ) else (
-        if not defined _sp_args (
-            set "_sp_args=!t!"
-            shift
-        ) else (
-            set "_sp_args=!_sp_args! !t!"
-            shift
-        )
-    )
-)
-rem if this is not nil, we have more tokens to process
+rem Set first cl argument (denoted by %1) to be processed
+set t=%1
+rem shift moves all cl positional arguments left by one
+rem meaning %2 is now %1, this allows us to iterate over each
+rem argument
+shift
+rem assign next "first" cl argument to cl_args, will be null when
+rem there are now further arguments to process
+set cl_args=%1
+if "!t:~0,1!" == "-" (
+    if defined _sp_subcommand (
+        rem We already have a subcommand, processing args now
+        if not defined _sp_args (
+            set "_sp_args=!t!"
+        ) else (
+            set "_sp_args=!_sp_args! !t!"
+        )
+    ) else (
+        if not defined _sp_flags (
+            set "_sp_flags=!t!"
+        ) else (
+            set "_sp_flags=!_sp_flags! !t!"
+        )
+    )
+) else if not defined _sp_subcommand (
+    set "_sp_subcommand=!t!"
+) else (
+    if not defined _sp_args (
+        set "_sp_args=!t!"
+    ) else (
+        set "_sp_args=!_sp_args! !t!"
+    )
+)
+rem if this is not nu;ll, we have more tokens to process
 rem start above process again with remaining unprocessed cl args
 if defined cl_args goto :process_cl_args
```
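For readers who don't speak batch, the rewritten loop above implements a three-way partition of the command line. Here is a rough Python sketch of the same logic; it is illustrative only and not part of the change:

```python
def partition_cli(args):
    """Split a spack command line into global flags, subcommand, and args.

    Mirrors the batch loop above: dashed tokens seen before the subcommand
    are global flags; the first non-dashed token is the subcommand; every
    token after the subcommand, dashed or not, belongs to the subcommand.
    """
    flags, subcommand, sub_args = [], None, []
    for token in args:
        if token.startswith("-"):
            # Once a subcommand is set, even dashed tokens are its args.
            (sub_args if subcommand else flags).append(token)
        elif subcommand is None:
            subcommand = token
        else:
            sub_args.append(token)
    return flags, subcommand, sub_args


# e.g. partition_cli(["-d", "external", "find", "cmake"])
# -> (["-d"], "external", ["find", "cmake"])
```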
```diff
@@ -39,12 +39,26 @@ function Read-SpackArgs {
     return $SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs
 }

+function Set-SpackEnv {
+    # This method is responsible
+    # for processing the return from $(spack <command>)
+    # which are returned as System.Object[]'s containing
+    # a list of env commands
+    # Invoke-Expression can only handle one command at a time
+    # so we iterate over the list to invoke the env modification
+    # expressions one at a time
+    foreach($envop in $args[0]){
+        Invoke-Expression $envop
+    }
+}
+
 function Invoke-SpackCD {
     if (Compare-CommonArgs $SpackSubCommandArgs) {
-        python $Env:SPACK_ROOT/bin/spack cd -h
+        python "$Env:SPACK_ROOT/bin/spack" cd -h
     }
     else {
-        $LOC = $(python $Env:SPACK_ROOT/bin/spack location $SpackSubCommandArgs)
+        $LOC = $(python "$Env:SPACK_ROOT/bin/spack" location $SpackSubCommandArgs)
        if (($NULL -ne $LOC)){
            if ( Test-Path -Path $LOC){
                Set-Location $LOC
@@ -61,7 +75,7 @@ function Invoke-SpackCD {

 function Invoke-SpackEnv {
     if (Compare-CommonArgs $SpackSubCommandArgs[0]) {
-        python $Env:SPACK_ROOT/bin/spack env -h
+        python "$Env:SPACK_ROOT/bin/spack" env -h
     }
     else {
         $SubCommandSubCommand = $SpackSubCommandArgs[0]
@@ -69,46 +83,46 @@ function Invoke-SpackEnv {
         switch ($SubCommandSubCommand) {
             "activate" {
                 if (Compare-CommonArgs $SubCommandSubCommandArgs) {
-                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
+                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
                 }
                 elseif ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
-                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
+                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
                 }
                 elseif (!$SubCommandSubCommandArgs) {
-                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
+                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
                 }
                 else {
-                    $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
-                    $ExecutionContext.InvokeCommand($SpackEnv)
+                    $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
+                    Set-SpackEnv $SpackEnv
                 }
             }
             "deactivate" {
                 if ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
-                    python $Env:SPACK_ROOT/bin/spack env deactivate $SubCommandSubCommandArgs
+                    python"$Env:SPACK_ROOT/bin/spack" env deactivate $SubCommandSubCommandArgs
                 }
                 elseif($SubCommandSubCommandArgs) {
-                    python $Env:SPACK_ROOT/bin/spack env deactivate -h
+                    python "$Env:SPACK_ROOT/bin/spack" env deactivate -h
                 }
                 else {
-                    $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env deactivate --pwsh)
-                    $ExecutionContext.InvokeCommand($SpackEnv)
+                    $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params env deactivate "--pwsh")
+                    Set-SpackEnv $SpackEnv
                 }
             }
-            default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
+            default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
         }
     }
 }

 function Invoke-SpackLoad {
     if (Compare-CommonArgs $SpackSubCommandArgs) {
-        python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
+        python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
     }
     elseif ([bool]($SpackSubCommandArgs.Where({($_ -eq "--pwsh") -or ($_ -eq "--list")}))) {
-        python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
+        python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
     }
     else {
-        $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
-        $ExecutionContext.InvokeCommand($SpackEnv)
+        $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
+        Set-SpackEnv $SpackEnv
     }
 }

@@ -116,7 +130,7 @@ function Invoke-SpackLoad {
 $SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs = Read-SpackArgs $args

 if (Compare-CommonArgs $SpackCMD_params) {
-    python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
+    python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
     exit $LASTEXITCODE
 }

@@ -128,5 +142,5 @@ switch($SpackSubCommand)
     "env" {Invoke-SpackEnv}
     "load" {Invoke-SpackLoad}
     "unload" {Invoke-SpackLoad}
-    default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
+    default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
 }
```
```diff
@@ -36,3 +36,9 @@ concretizer:
   # on each root spec, allowing different versions and variants of the same package in
   # an environment.
   unify: true
+  # Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
+  duplicates:
+    # "none": allows a single node for any package in the DAG.
+    # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
+    # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
+    strategy: none
```
```diff
@@ -60,7 +60,7 @@ packages:
     xxd: [xxd-standalone, vim]
     yacc: [bison, byacc]
     ziglang: [zig]
-    zlib-api: [zlib, zlib-ng+compat]
+    zlib-api: [zlib-ng+compat, zlib]
   permissions:
     read: world
     write: user
```
```diff
@@ -9,9 +9,32 @@
 Bundle
 ------

-``BundlePackage`` represents a set of packages that are expected to work well
-together, such as a collection of commonly used software libraries. The
-associated software is specified as bundle dependencies.
+``BundlePackage`` represents a set of packages that are expected to work
+well together, such as a collection of commonly used software libraries.
+The associated software is specified as dependencies.
+
+If it makes sense, variants, conflicts, and requirements can be added to
+the package. :ref:`Variants <variants>` ensure that common build options
+are consistent across the packages supporting them. :ref:`Conflicts
+and requirements <packaging_conflicts>` prevent attempts to build with known
+bugs or limitations.
+
+For example, if ``MyBundlePackage`` is known to only build on ``linux``,
+it could use the ``require`` directive as follows:
+
+.. code-block:: python
+
+   require("platform=linux", msg="MyBundlePackage only builds on linux")
+
+Spack has a number of built-in bundle packages, such as:
+
+* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_
+* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_
+* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/libc/package.py>`_
+* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/xsdk/package.py>`_
+
+where ``Xsdk`` also inherits from ``CudaPackage`` and ``RocmPackage`` and
+``Libc`` is a virtual bundle package for the C standard library.

 ^^^^^^^^
```
**lib/spack/docs/gpu_configuration.rst** (new file, 113 lines)

```rst
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

==========================
Using External GPU Support
==========================

Many packages come with a ``+cuda`` or ``+rocm`` variant. With no added
configuration Spack will download and install the needed components.
It may be preferable to use existing system support: the following sections
help with using a system installation of GPU libraries.

-----------------------------------
Using an External ROCm Installation
-----------------------------------

Spack breaks down ROCm into many separate component packages. The following
is an example ``packages.yaml`` that organizes a consistent set of ROCm
components for use by dependent packages:

.. code-block:: yaml

   packages:
     all:
       compiler: [rocmcc@=5.3.0]
       variants: amdgpu_target=gfx90a
     hip:
       buildable: false
       externals:
       - spec: hip@5.3.0
         prefix: /opt/rocm-5.3.0/hip
     hsa-rocr-dev:
       buildable: false
       externals:
       - spec: hsa-rocr-dev@5.3.0
         prefix: /opt/rocm-5.3.0/
     llvm-amdgpu:
       buildable: false
       externals:
       - spec: llvm-amdgpu@5.3.0
         prefix: /opt/rocm-5.3.0/llvm/
     comgr:
       buildable: false
       externals:
       - spec: comgr@5.3.0
         prefix: /opt/rocm-5.3.0/
     hipsparse:
       buildable: false
       externals:
       - spec: hipsparse@5.3.0
         prefix: /opt/rocm-5.3.0/
     hipblas:
       buildable: false
       externals:
       - spec: hipblas@5.3.0
         prefix: /opt/rocm-5.3.0/
     rocblas:
       buildable: false
       externals:
       - spec: rocblas@5.3.0
         prefix: /opt/rocm-5.3.0/
     rocprim:
       buildable: false
       externals:
       - spec: rocprim@5.3.0
         prefix: /opt/rocm-5.3.0/rocprim/

This is in combination with the following compiler definition:

.. code-block:: yaml

   compilers:
   - compiler:
       spec: rocmcc@=5.3.0
       paths:
         cc: /opt/rocm-5.3.0/bin/amdclang
         cxx: /opt/rocm-5.3.0/bin/amdclang++
         f77: null
         fc: /opt/rocm-5.3.0/bin/amdflang
       operating_system: rhel8
       target: x86_64

This includes the following considerations:

- Each of the listed externals specifies ``buildable: false`` to force Spack
  to use only the externals we defined.
- ``spack external find`` can automatically locate some of the ``hip``/``rocm``
  packages, but not all of them, and furthermore not in a manner that
  guarantees a complementary set if multiple ROCm installations are available.
- The ``prefix`` is the same for several components, but note that others
  require listing one of the subdirectories as a prefix.

-----------------------------------
Using an External CUDA Installation
-----------------------------------

CUDA is split into fewer components and is simpler to specify:

.. code-block:: yaml

   packages:
     all:
       variants:
       - cuda_arch=70
     cuda:
       buildable: false
       externals:
       - spec: cuda@11.0.2
         prefix: /opt/cuda/cuda-11.0.2/

where ``/opt/cuda/cuda-11.0.2/lib/`` contains ``libcudart.so``.
```
```diff
@@ -77,6 +77,7 @@ or refer to the full manual below.
    extensions
    pipelines
    signing
+   gpu_configuration

 .. toctree::
    :maxdepth: 2
```
```diff
@@ -363,6 +363,42 @@ one of these::
 If Spack finds none of these variables set, it will look for ``vim``, ``vi``, ``emacs``,
 ``nano``, and ``notepad``, in that order.

+^^^^^^^^^^^^^^^^^
+Bundling software
+^^^^^^^^^^^^^^^^^
+
+If you have a collection of software expected to work well together with
+no source code of its own, you can create a :ref:`BundlePackage <bundlepackage>`.
+Examples where bundle packages can be useful include defining suites of
+applications (e.g, `EcpProxyApps
+<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_), commonly used libraries
+(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_),
+and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py>`_).
+
+These versioned packages primarily consist of dependencies on the associated
+software packages. They can include :ref:`variants <variants>` to ensure
+common build options are consistently applied to dependencies. Known build
+failures, such as not building on a platform or when certain compilers or
+variants are used, can be flagged with :ref:`conflicts <packaging_conflicts>`.
+Build requirements, such as only building with specific compilers, can similarly
+be flagged with :ref:`requires <packaging_conflicts>`.
+
+The ``spack create --template bundle`` command will create a skeleton
+``BundlePackage`` ``package.py`` for you:
+
+.. code-block:: console
+
+   $ spack create --template bundle --name coolsdk
+
+Now you can fill in the basic package documentation, version(s), and software
+package dependencies along with any other relevant customizations.
+
+.. note::
+
+   Remember that bundle packages have no software of their own so there
+   is nothing to download.
+
+
 ^^^^^^^^^^^^^^^^^^^^^^^^^
 Non-downloadable software
 ^^^^^^^^^^^^^^^^^^^^^^^^^
```
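For context, a filled-in skeleton might look like the following. This is a hypothetical ``package.py`` sketch: the ``coolsdk`` name, version, and dependency list are illustrative and not taken from the diff.

```python
# Hypothetical var/spack/repos/builtin/packages/coolsdk/package.py;
# names, version, and dependencies are illustrative only.
from spack.package import *


class Coolsdk(BundlePackage):
    """Example bundle: a suite of packages expected to work well together."""

    homepage = "https://example.com/coolsdk"

    # Bundle packages have no source, so version directives are bare names.
    version("1.0")

    # The bundled software is expressed purely as dependencies.
    depends_on("zlib-api")
    depends_on("cmake", type="build")
```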
```diff
@@ -610,7 +646,16 @@ add a line like this in the package class:
    version("8.2.0", md5="1c9f62f0778697a09d36121ead88e08e")
    version("8.1.2", md5="d47dd09ed7ae6e7fd6f9a816d7f5fdf6")

-Versions should be listed in descending order, from newest to oldest.
+.. note::
+
+   By convention, we list versions in descending order, from newest to oldest.
+
+.. note::
+
+   :ref:`Bundle packages <bundlepackage>` do not have source code so
+   there is nothing to fetch. Consequently, their version directives
+   consist solely of the version name (e.g., ``version("202309")``).
+

 ^^^^^^^^^^^^^
 Date Versions
```
```diff
@@ -2678,7 +2723,7 @@ Conflicts and requirements
 --------------------------

 Sometimes packages have known bugs, or limitations, that would prevent them
-to build e.g. against other dependencies or with certain compilers. Spack
+from building e.g. against other dependencies or with certain compilers. Spack
 makes it possible to express such constraints with the ``conflicts`` directive.

 Adding the following to a package:
```
```diff
@@ -4773,17 +4818,17 @@ For example, running:

 results in spack checking that the installation created the following **file**:

-* ``self.prefix/bin/reframe``
+* ``self.prefix.bin.reframe``

 and the following **directories**:

-* ``self.prefix/bin``
-* ``self.prefix/config``
-* ``self.prefix/docs``
-* ``self.prefix/reframe``
-* ``self.prefix/tutorials``
-* ``self.prefix/unittests``
-* ``self.prefix/cscs-checks``
+* ``self.prefix.bin``
+* ``self.prefix.config``
+* ``self.prefix.docs``
+* ``self.prefix.reframe``
+* ``self.prefix.tutorials``
+* ``self.prefix.unittests``
+* ``self.prefix.cscs-checks``

 If **any** of these paths are missing, then Spack considers the installation
 to have failed.
```
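The dotted paths above work because Spack's prefix objects turn attribute access into path joins. A minimal sketch of that idea follows; it is a simplified stand-in for illustration, not Spack's actual ``Prefix`` implementation:

```python
import os


class Prefix(str):
    """Simplified stand-in: attribute access joins path components."""

    def __getattr__(self, component: str) -> "Prefix":
        # prefix.bin.reframe -> <prefix>/bin/reframe
        return Prefix(os.path.join(self, component))


prefix = Prefix("/opt/spack/reframe-4.0.0")
assert prefix.bin.reframe == "/opt/spack/reframe-4.0.0/bin/reframe"
```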
```diff
@@ -4927,7 +4972,7 @@ installed executable. The check is implemented as follows:

    @on_package_attributes(run_tests=True)
    def check_list(self):
        with working_dir(self.stage.source_path):
-           reframe = Executable(join_path(self.prefix, "bin", "reframe"))
+           reframe = Executable(self.prefix.bin.reframe)
            reframe("-l")

 .. warning::
```
```diff
@@ -5147,8 +5192,8 @@ embedded test parts.

        for example in ["ex1", "ex2"]:
            with test_part(
                self,
-               "test_example_{0}".format(example),
-               purpose="run installed {0}".format(example),
+               f"test_example_{example}",
+               purpose=f"run installed {example}",
            ):
                exe = which(join_path(self.prefix.bin, example))
                exe()
```
```diff
@@ -5226,11 +5271,10 @@ Below illustrates using this feature to compile an example.

        ...
        cxx = which(os.environ["CXX"])
        cxx(
-           "-L{0}".format(self.prefix.lib),
-           "-I{0}".format(self.prefix.include),
-           "{0}.cpp".format(exe),
-           "-o",
-           exe
+           f"-L{self.prefix.lib}",
+           f"-I{self.prefix.include}",
+           f"{exe}.cpp",
+           "-o", exe
        )
        cxx_example = which(exe)
        cxx_example()
```
```diff
@@ -5247,14 +5291,14 @@ Saving build-time files
 We highly recommend re-using build-time test sources and pared down
 input files for testing installed software. These files are easier
 to keep synchronized with software capabilities since they reside
 within the software's repository.

 If that is not possible, you can add test-related files to the package
 repository (see :ref:`adding custom files <cache_custom_files>`). It
 will be important to maintain them so they work across listed or supported
 versions of the package.

-You can use the ``cache_extra_test_sources`` method to copy directories
+You can use the ``cache_extra_test_sources`` helper to copy directories
 and or files from the source build stage directory to the package's
 installation directory.

```
```diff
@@ -5262,10 +5306,15 @@ The signature for ``cache_extra_test_sources`` is:

 .. code-block:: python

-   def cache_extra_test_sources(self, srcs):
+   def cache_extra_test_sources(pkg, srcs):

-where ``srcs`` is a string *or* a list of strings corresponding to the
-paths of subdirectories and or files needed for stand-alone testing.
-
+where each argument has the following meaning:
+
+* ``pkg`` is an instance of the package for the spec under test.
+
+* ``srcs`` is a string *or* a list of strings corresponding to the
+  paths of subdirectories and or files needed for stand-alone testing.
+
 The paths must be relative to the staged source directory. Contents of
 subdirectories and files are copied to a special test cache subdirectory
 of the installation prefix. They are automatically copied to the appropriate
```
```diff
@@ -5286,21 +5335,18 @@ and using ``foo.c`` in a test method is illustrated below.

        srcs = ["tests",
                join_path("examples", "foo.c"),
                join_path("examples", "bar.c")]
-       self.cache_extra_test_sources(srcs)
+       cache_extra_test_sources(self, srcs)

    def test_foo(self):
        exe = "foo"
-       src_dir = join_path(
-           self.test_suite.current_test_cache_dir, "examples"
-       )
+       src_dir = self.test_suite.current_test_cache_dir.examples
        with working_dir(src_dir):
            cc = which(os.environ["CC"])
            cc(
-               "-L{0}".format(self.prefix.lib),
-               "-I{0}".format(self.prefix.include),
-               "{0}.c".format(exe),
-               "-o",
-               exe
+               f"-L{self.prefix.lib}",
+               f"-I{self.prefix.include}",
+               f"{exe}.c",
+               "-o", exe
            )
            foo = which(exe)
            foo()
```
```diff
@@ -5326,9 +5372,9 @@ the files using the ``self.test_suite.current_test_cache_dir`` property.
 In our example above, test methods can use the following paths to reference
 the copy of each entry listed in ``srcs``, respectively:

-* ``join_path(self.test_suite.current_test_cache_dir, "tests")``
-* ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
-* ``join_path(self.test_suite.current_test_cache_dir, "examples", "bar.c")``
+* ``self.test_suite.current_test_cache_dir.tests``
+* ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
+* ``join_path(self.test_suite.current_test_cache_dir.examples, "bar.c")``

 .. admonition:: Library packages should build stand-alone tests

```
```diff
@@ -5347,7 +5393,7 @@ the copy of each entry listed in ``srcs``, respectively:
    If one or more of the copied files needs to be modified to reference
    the installed software, it is recommended that those changes be made
    to the cached files **once** in the ``copy_test_sources`` method and
-   ***after** the call to ``self.cache_extra_test_sources()``. This will
+   ***after** the call to ``cache_extra_test_sources()``. This will
    reduce the amount of unnecessary work in the test method **and** avoid
    problems testing in shared instances and facility deployments.

```
```diff
@@ -5394,7 +5440,7 @@ property as shown below.
        """build and run custom-example"""
        data_dir = self.test_suite.current_test_data_dir
        exe = "custom-example"
-       src = datadir.join("{0}.cpp".format(exe))
+       src = datadir.join(f"{exe}.cpp")
        ...
        # TODO: Build custom-example using src and exe
        ...
```
@@ -5410,7 +5456,7 @@ Reading expected output from a file
|
|||||||
|
|
||||||
The helper function ``get_escaped_text_output`` is available for packages
|
The helper function ``get_escaped_text_output`` is available for packages
|
||||||
to retrieve and properly format the text from a file that contains the
|
to retrieve and properly format the text from a file that contains the
|
||||||
expected output from running an executable that may contain special
|
expected output from running an executable that may contain special
|
||||||
characters.
|
characters.
|
||||||
|
|
||||||
The signature for ``get_escaped_text_output`` is:
|
The signature for ``get_escaped_text_output`` is:
|
||||||
@@ -5444,7 +5490,7 @@ added to the package's ``test`` subdirectory.
|
|||||||
db_filename, ".dump", output=str.split, error=str.split
|
db_filename, ".dump", output=str.split, error=str.split
|
||||||
)
|
)
|
||||||
for exp in expected:
|
for exp in expected:
|
||||||
assert re.search(exp, out), "Expected '{0}' in output".format(exp)
|
assert re.search(exp, out), f"Expected '{exp}' in output"
|
||||||
|
|
||||||
If the file was instead copied from the ``tests`` subdirectory of the staged
|
If the file was instead copied from the ``tests`` subdirectory of the staged
|
||||||
source code, the path would be obtained as shown below.
|
source code, the path would be obtained as shown below.
|
||||||
@@ -5457,7 +5503,7 @@ source code, the path would be obtained as shown below.
|
|||||||
db_filename = test_cache_dir.join("packages.db")
|
db_filename = test_cache_dir.join("packages.db")
|
||||||
|
|
||||||
Alternatively, if the file was copied to the ``share/tests`` subdirectory
|
Alternatively, if the file was copied to the ``share/tests`` subdirectory
|
||||||
as part of the installation process, the test could access the path as
|
as part of the installation process, the test could access the path as
|
||||||
follows:
|
follows:
|
||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
@@ -5494,9 +5540,12 @@ Invoking the method is the equivalent of:
|
|||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
|
errors = []
|
||||||
for check in expected:
|
for check in expected:
|
||||||
if not re.search(check, actual):
|
if not re.search(check, actual):
|
||||||
raise RuntimeError("Expected '{0}' in output '{1}'".format(check, actual))
|
errors.append(f"Expected '{check}' in output '{actual}'")
|
||||||
|
if errors:
|
||||||
|
raise RuntimeError("\n ".join(errors))
|
||||||
|
|
||||||
|
|
||||||
.. _accessing-files:
|
.. _accessing-files:
|
||||||
@@ -5536,7 +5585,7 @@ repository, and installation.
|
|||||||
- ``self.test_suite.test_dir_for_spec(self.spec)``
|
- ``self.test_suite.test_dir_for_spec(self.spec)``
|
||||||
* - Current Spec's Build-time Files
|
* - Current Spec's Build-time Files
|
||||||
- ``self.test_suite.current_test_cache_dir``
|
- ``self.test_suite.current_test_cache_dir``
|
||||||
- ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
|
- ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
|
||||||
* - Current Spec's Custom Test Files
|
* - Current Spec's Custom Test Files
|
||||||
- ``self.test_suite.current_test_data_dir``
|
- ``self.test_suite.current_test_data_dir``
|
||||||
- ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``
|
- ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``
|
||||||
@@ -5551,7 +5600,7 @@ Inheriting stand-alone tests
|
|||||||
Stand-alone tests defined in parent (e.g., :ref:`build-systems`) and
|
Stand-alone tests defined in parent (e.g., :ref:`build-systems`) and
|
||||||
virtual (e.g., :ref:`virtual-dependencies`) packages are executed by
|
virtual (e.g., :ref:`virtual-dependencies`) packages are executed by
|
||||||
packages that inherit from or provide interface implementations for those
|
packages that inherit from or provide interface implementations for those
|
||||||
packages, respectively.
|
packages, respectively.
|
||||||
|
|
||||||
The table below summarizes the stand-alone tests that will be executed along
|
The table below summarizes the stand-alone tests that will be executed along
|
||||||
with those implemented in the package itself.
|
with those implemented in the package itself.
|
||||||
@@ -5621,7 +5670,7 @@ for ``openmpi``:
|
|||||||
SKIPPED: test_version_oshcc: oshcc is not installed
|
SKIPPED: test_version_oshcc: oshcc is not installed
|
||||||
...
|
...
|
||||||
==> [2023-03-10-16:04:02.215227] Completed testing
|
==> [2023-03-10-16:04:02.215227] Completed testing
|
||||||
==> [2023-03-10-16:04:02.215597]
|
==> [2023-03-10-16:04:02.215597]
|
||||||
======================== SUMMARY: openmpi-4.1.4-ubmrigj ========================
|
======================== SUMMARY: openmpi-4.1.4-ubmrigj ========================
|
||||||
Openmpi::test_bin_mpirun .. PASSED
|
Openmpi::test_bin_mpirun .. PASSED
|
||||||
Openmpi::test_bin_ompi_info .. PASSED
|
Openmpi::test_bin_ompi_info .. PASSED
|
||||||
@@ -6071,7 +6120,7 @@ in the extra attributes can implement this method like this:
|
|||||||
@classmethod
|
@classmethod
|
||||||
def validate_detected_spec(cls, spec, extra_attributes):
|
def validate_detected_spec(cls, spec, extra_attributes):
|
||||||
"""Check that "compilers" is in the extra attributes."""
|
"""Check that "compilers" is in the extra attributes."""
|
||||||
msg = ("the extra attribute "compilers" must be set for "
|
msg = ("the extra attribute 'compilers' must be set for "
|
||||||
"the detected spec '{0}'".format(spec))
|
"the detected spec '{0}'".format(spec))
|
||||||
assert "compilers" in extra_attributes, msg
|
assert "compilers" in extra_attributes, msg
|
||||||
|
|
||||||
|
|||||||
@@ -1,13 +1,13 @@
|
|||||||
sphinx==6.2.1
|
sphinx==7.2.5
|
||||||
sphinxcontrib-programoutput==0.17
|
sphinxcontrib-programoutput==0.17
|
||||||
sphinx_design==0.5.0
|
sphinx_design==0.5.0
|
||||||
sphinx-rtd-theme==1.2.2
|
sphinx-rtd-theme==1.3.0
|
||||||
python-levenshtein==0.21.1
|
python-levenshtein==0.21.1
|
||||||
docutils==0.18.1
|
docutils==0.18.1
|
||||||
pygments==2.16.1
|
pygments==2.16.1
|
||||||
urllib3==2.0.4
|
urllib3==2.0.4
|
||||||
pytest==7.4.0
|
pytest==7.4.2
|
||||||
isort==5.12.0
|
isort==5.12.0
|
||||||
black==23.7.0
|
black==23.7.0
|
||||||
flake8==6.1.0
|
flake8==6.1.0
|
||||||
mypy==1.5.0
|
mypy==1.5.1
|
||||||
|
|||||||
@@ -18,11 +18,13 @@
|
|||||||
import sys
|
import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
|
from itertools import accumulate
|
||||||
from typing import Callable, Iterable, List, Match, Optional, Tuple, Union
|
from typing import Callable, Iterable, List, Match, Optional, Tuple, Union
|
||||||
|
|
||||||
|
import llnl.util.symlink
|
||||||
from llnl.util import tty
|
from llnl.util import tty
|
||||||
from llnl.util.lang import dedupe, memoized
|
from llnl.util.lang import dedupe, memoized
|
||||||
from llnl.util.symlink import islink, symlink
|
from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink
|
||||||
|
|
||||||
from spack.util.executable import Executable, which
|
from spack.util.executable import Executable, which
|
||||||
from spack.util.path import path_to_os_path, system_path_filter
|
from spack.util.path import path_to_os_path, system_path_filter
|
||||||
@@ -101,7 +103,7 @@ def _nop(args, ns=None, follow_symlinks=None):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
# follow symlinks (aka don't not follow symlinks)
|
# follow symlinks (aka don't not follow symlinks)
|
||||||
follow = follow_symlinks or not (os.path.islink(src) and os.path.islink(dst))
|
follow = follow_symlinks or not (islink(src) and islink(dst))
|
||||||
if follow:
|
if follow:
|
||||||
# use the real function if it exists
|
# use the real function if it exists
|
||||||
def lookup(name):
|
def lookup(name):
|
||||||
@@ -169,7 +171,7 @@ def rename(src, dst):
|
|||||||
if sys.platform == "win32":
|
if sys.platform == "win32":
|
||||||
# Windows path existence checks will sometimes fail on junctions/links/symlinks
|
# Windows path existence checks will sometimes fail on junctions/links/symlinks
|
||||||
# so check for that case
|
# so check for that case
|
||||||
if os.path.exists(dst) or os.path.islink(dst):
|
if os.path.exists(dst) or islink(dst):
|
||||||
os.remove(dst)
|
os.remove(dst)
|
||||||
os.rename(src, dst)
|
os.rename(src, dst)
|
||||||
|
|
||||||
@@ -566,7 +568,7 @@ def set_install_permissions(path):
|
|||||||
# If this points to a file maintained in a Spack prefix, it is assumed that
|
# If this points to a file maintained in a Spack prefix, it is assumed that
|
||||||
# this function will be invoked on the target. If the file is outside a
|
# this function will be invoked on the target. If the file is outside a
|
||||||
# Spack-maintained prefix, the permissions should not be modified.
|
# Spack-maintained prefix, the permissions should not be modified.
|
||||||
if os.path.islink(path):
|
if islink(path):
|
||||||
return
|
return
|
||||||
if os.path.isdir(path):
|
if os.path.isdir(path):
|
||||||
os.chmod(path, 0o755)
|
os.chmod(path, 0o755)
|
||||||
@@ -635,7 +637,7 @@ def chmod_x(entry, perms):
|
|||||||
@system_path_filter
|
@system_path_filter
|
||||||
def copy_mode(src, dest):
|
def copy_mode(src, dest):
|
||||||
"""Set the mode of dest to that of src unless it is a link."""
|
"""Set the mode of dest to that of src unless it is a link."""
|
||||||
if os.path.islink(dest):
|
if islink(dest):
|
||||||
return
|
return
|
||||||
src_mode = os.stat(src).st_mode
|
src_mode = os.stat(src).st_mode
|
||||||
dest_mode = os.stat(dest).st_mode
|
dest_mode = os.stat(dest).st_mode
|
||||||
@@ -721,26 +723,12 @@ def install(src, dest):
|
|||||||
copy(src, dest, _permissions=True)
|
copy(src, dest, _permissions=True)
|
||||||
|
|
||||||
|
|
||||||
@system_path_filter
|
|
||||||
def resolve_link_target_relative_to_the_link(link):
|
|
||||||
"""
|
|
||||||
os.path.isdir uses os.path.exists, which for links will check
|
|
||||||
the existence of the link target. If the link target is relative to
|
|
||||||
the link, we need to construct a pathname that is valid from
|
|
||||||
our cwd (which may not be the same as the link's directory)
|
|
||||||
"""
|
|
||||||
target = os.readlink(link)
|
|
||||||
if os.path.isabs(target):
|
|
||||||
return target
|
|
||||||
link_dir = os.path.dirname(os.path.abspath(link))
|
|
||||||
return os.path.join(link_dir, target)
|
|
||||||
|
|
||||||
|
|
||||||
@system_path_filter
|
@system_path_filter
|
||||||
def copy_tree(
|
def copy_tree(
|
||||||
src: str,
|
src: str,
|
||||||
dest: str,
|
dest: str,
|
||||||
symlinks: bool = True,
|
symlinks: bool = True,
|
||||||
|
allow_broken_symlinks: bool = sys.platform != "win32",
|
||||||
ignore: Optional[Callable[[str], bool]] = None,
|
ignore: Optional[Callable[[str], bool]] = None,
|
||||||
_permissions: bool = False,
|
_permissions: bool = False,
|
||||||
):
|
):
|
||||||
@@ -763,6 +751,8 @@ def copy_tree(
|
|||||||
src (str): the directory to copy
|
src (str): the directory to copy
|
||||||
dest (str): the destination directory
|
dest (str): the destination directory
|
||||||
symlinks (bool): whether or not to preserve symlinks
|
symlinks (bool): whether or not to preserve symlinks
|
||||||
|
allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
|
||||||
|
On Windows, setting this to True will raise an exception. Defaults to True on non-Windows platforms.
|
||||||
ignore (typing.Callable): function indicating which files to ignore
|
ignore (typing.Callable): function indicating which files to ignore
|
||||||
_permissions (bool): for internal use only
|
_permissions (bool): for internal use only
|
||||||
|
|
||||||
@@ -770,6 +760,8 @@ def copy_tree(
|
|||||||
IOError: if *src* does not match any files or directories
|
IOError: if *src* does not match any files or directories
|
||||||
ValueError: if *src* is a parent directory of *dest*
|
ValueError: if *src* is a parent directory of *dest*
|
||||||
"""
|
"""
|
||||||
|
if allow_broken_symlinks and sys.platform == "win32":
|
||||||
|
raise llnl.util.symlink.SymlinkError("Cannot allow broken symlinks on Windows!")
|
||||||
if _permissions:
|
if _permissions:
|
||||||
tty.debug("Installing {0} to {1}".format(src, dest))
|
tty.debug("Installing {0} to {1}".format(src, dest))
|
||||||
else:
|
else:
|
||||||
@@ -783,6 +775,11 @@ def copy_tree(
|
|||||||
if not files:
|
if not files:
|
||||||
raise IOError("No such file or directory: '{0}'".format(src))
|
raise IOError("No such file or directory: '{0}'".format(src))
|
||||||
|
|
||||||
|
# For Windows hard-links and junctions, the source path must exist to make a symlink. Add
|
||||||
|
# all symlinks to this list while traversing the tree, then when finished, make all
|
||||||
|
# symlinks at the end.
|
||||||
|
links = []
|
||||||
|
|
||||||
for src in files:
|
for src in files:
|
||||||
abs_src = os.path.abspath(src)
|
abs_src = os.path.abspath(src)
|
||||||
if not abs_src.endswith(os.path.sep):
|
if not abs_src.endswith(os.path.sep):
|
||||||
@@ -805,7 +802,7 @@ def copy_tree(
|
|||||||
ignore=ignore,
|
ignore=ignore,
|
||||||
follow_nonexisting=True,
|
follow_nonexisting=True,
|
||||||
):
|
):
|
||||||
if os.path.islink(s):
|
if islink(s):
|
||||||
link_target = resolve_link_target_relative_to_the_link(s)
|
link_target = resolve_link_target_relative_to_the_link(s)
|
||||||
if symlinks:
|
if symlinks:
|
||||||
target = os.readlink(s)
|
target = os.readlink(s)
|
||||||
@@ -819,7 +816,9 @@ def escaped_path(path):
|
|||||||
tty.debug("Redirecting link {0} to {1}".format(target, new_target))
|
tty.debug("Redirecting link {0} to {1}".format(target, new_target))
|
||||||
target = new_target
|
target = new_target
|
||||||
|
|
||||||
symlink(target, d)
|
links.append((target, d, s))
|
||||||
|
continue
|
||||||
|
|
||||||
elif os.path.isdir(link_target):
|
elif os.path.isdir(link_target):
|
||||||
mkdirp(d)
|
mkdirp(d)
|
||||||
else:
|
else:
|
||||||
@@ -834,9 +833,17 @@ def escaped_path(path):
|
|||||||
set_install_permissions(d)
|
set_install_permissions(d)
|
||||||
copy_mode(s, d)
|
copy_mode(s, d)
|
||||||
|
|
||||||
|
for target, d, s in links:
|
||||||
|
symlink(target, d, allow_broken_symlinks=allow_broken_symlinks)
|
||||||
|
if _permissions:
|
||||||
|
set_install_permissions(d)
|
||||||
|
copy_mode(s, d)
|
||||||
|
|
||||||
|
|
||||||
@system_path_filter
|
@system_path_filter
|
||||||
def install_tree(src, dest, symlinks=True, ignore=None):
|
def install_tree(
|
||||||
|
src, dest, symlinks=True, ignore=None, allow_broken_symlinks=sys.platform != "win32"
|
||||||
|
):
|
||||||
"""Recursively install an entire directory tree rooted at *src*.
|
"""Recursively install an entire directory tree rooted at *src*.
|
||||||
|
|
||||||
Same as :py:func:`copy_tree` with the addition of setting proper
|
Same as :py:func:`copy_tree` with the addition of setting proper
|
||||||
@@ -847,12 +854,21 @@ def install_tree(src, dest, symlinks=True, ignore=None):
|
|||||||
dest (str): the destination directory
|
dest (str): the destination directory
|
||||||
symlinks (bool): whether or not to preserve symlinks
|
symlinks (bool): whether or not to preserve symlinks
|
||||||
ignore (typing.Callable): function indicating which files to ignore
|
ignore (typing.Callable): function indicating which files to ignore
|
||||||
|
allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
|
||||||
|
On Windows, setting this to True will raise an exception.
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
IOError: if *src* does not match any files or directories
|
IOError: if *src* does not match any files or directories
|
||||||
ValueError: if *src* is a parent directory of *dest*
|
ValueError: if *src* is a parent directory of *dest*
|
||||||
"""
|
"""
|
||||||
copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
|
copy_tree(
|
||||||
|
src,
|
||||||
|
dest,
|
||||||
|
symlinks=symlinks,
|
||||||
|
allow_broken_symlinks=allow_broken_symlinks,
|
||||||
|
ignore=ignore,
|
||||||
|
_permissions=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
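A brief usage sketch of the new keyword (paths are hypothetical):

.. code-block:: python

   from llnl.util.filesystem import install_tree

   # Refuse to reproduce dangling symlinks while installing a staged tree.
   install_tree("/tmp/stage/pkg", "/opt/spack/pkg", allow_broken_symlinks=False)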
@system_path_filter
|
@system_path_filter
|
||||||
@@ -1256,7 +1272,12 @@ def traverse_tree(
|
|||||||
Keyword Arguments:
|
Keyword Arguments:
|
||||||
order (str): Whether to do pre- or post-order traversal. Accepted
|
order (str): Whether to do pre- or post-order traversal. Accepted
|
||||||
values are 'pre' and 'post'
|
values are 'pre' and 'post'
|
||||||
ignore (typing.Callable): function indicating which files to ignore
|
ignore (typing.Callable): function indicating which files to ignore. This will also
|
||||||
|
ignore symlinks if they point to an ignored file (regardless of whether the symlink
|
||||||
|
is explicitly ignored); note this only supports one layer of indirection (i.e. if
|
||||||
|
you have x -> y -> z, and z is ignored but x/y are not, then y would be ignored
|
||||||
|
but not x). To avoid this, make sure the ignore function also ignores the symlink
|
||||||
|
paths too.
|
||||||
follow_nonexisting (bool): Whether to descend into directories in
|
follow_nonexisting (bool): Whether to descend into directories in
|
||||||
``src`` that do not exist in ``dest``. Default is True
|
``src`` that do not exist in ``dest``. Default is True
|
||||||
follow_links (bool): Whether to descend into symlinks in ``src``
|
follow_links (bool): Whether to descend into symlinks in ``src``
|
||||||
@@ -1283,11 +1304,24 @@ def traverse_tree(
|
|||||||
dest_child = os.path.join(dest_path, f)
|
dest_child = os.path.join(dest_path, f)
|
||||||
rel_child = os.path.join(rel_path, f)
|
rel_child = os.path.join(rel_path, f)
|
||||||
|
|
||||||
|
# If the source path is a link and the link's target is ignored, then ignore the link too,
|
||||||
|
# but only do this if the ignore is defined.
|
||||||
|
if ignore is not None:
|
||||||
|
if islink(source_child) and not follow_links:
|
||||||
|
target = readlink(source_child)
|
||||||
|
all_parents = accumulate(target.split(os.sep), lambda x, y: os.path.join(x, y))
|
||||||
|
if any(map(ignore, all_parents)):
|
||||||
|
tty.warn(
|
||||||
|
f"Skipping {source_path} because the source or a part of the source's "
|
||||||
|
f"path is included in the ignores."
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
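What the ``accumulate`` call above produces, in isolation (a standalone illustration with a made-up link target):

.. code-block:: python

   import os
   from itertools import accumulate

   target = os.path.join("a", "b", "c")
   all_parents = list(accumulate(target.split(os.sep), lambda x, y: os.path.join(x, y)))
   # all_parents == ["a", "a/b", "a/b/c"] on POSIX, so a link is skipped
   # when the ignore function matches the target or any of its ancestors.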
# Treat as a directory
|
# Treat as a directory
|
||||||
# TODO: for symlinks, os.path.isdir looks for the link target. If the
|
# TODO: for symlinks, os.path.isdir looks for the link target. If the
|
||||||
# target is relative to the link, then that may not resolve properly
|
# target is relative to the link, then that may not resolve properly
|
||||||
# relative to our cwd - see resolve_link_target_relative_to_the_link
|
# relative to our cwd - see resolve_link_target_relative_to_the_link
|
||||||
if os.path.isdir(source_child) and (follow_links or not os.path.islink(source_child)):
|
if os.path.isdir(source_child) and (follow_links or not islink(source_child)):
|
||||||
# When follow_nonexisting isn't set, don't descend into dirs
|
# When follow_nonexisting isn't set, don't descend into dirs
|
||||||
# in source that do not exist in dest
|
# in source that do not exist in dest
|
||||||
if follow_nonexisting or os.path.exists(dest_child):
|
if follow_nonexisting or os.path.exists(dest_child):
|
||||||
@@ -1313,7 +1347,11 @@ def traverse_tree(
|
|||||||
|
|
||||||
def lexists_islink_isdir(path):
|
def lexists_islink_isdir(path):
|
||||||
"""Computes the tuple (lexists(path), islink(path), isdir(path)) in a minimal
|
"""Computes the tuple (lexists(path), islink(path), isdir(path)) in a minimal
|
||||||
number of stat calls."""
|
number of stat calls on Unix. On Windows, use os.path and the symlink module's islink instead."""
|
||||||
|
if sys.platform == "win32":
|
||||||
|
if not os.path.lexists(path):
|
||||||
|
return False, False, False
|
||||||
|
return os.path.lexists(path), islink(path), os.path.isdir(path)
|
||||||
# First try to lstat, so we know if it's a link or not.
|
# First try to lstat, so we know if it's a link or not.
|
||||||
try:
|
try:
|
||||||
lst = os.lstat(path)
|
lst = os.lstat(path)
|
||||||
@@ -1528,7 +1566,7 @@ def remove_if_dead_link(path):
|
|||||||
Parameters:
|
Parameters:
|
||||||
path (str): The potential dead link
|
path (str): The potential dead link
|
||||||
"""
|
"""
|
||||||
if os.path.islink(path) and not os.path.exists(path):
|
if islink(path) and not os.path.exists(path):
|
||||||
os.unlink(path)
|
os.unlink(path)
|
||||||
|
|
||||||
|
|
||||||
@@ -1587,7 +1625,7 @@ def remove_linked_tree(path):
|
|||||||
kwargs["onerror"] = readonly_file_handler(ignore_errors=True)
|
kwargs["onerror"] = readonly_file_handler(ignore_errors=True)
|
||||||
|
|
||||||
if os.path.exists(path):
|
if os.path.exists(path):
|
||||||
if os.path.islink(path):
|
if islink(path):
|
||||||
shutil.rmtree(os.path.realpath(path), **kwargs)
|
shutil.rmtree(os.path.realpath(path), **kwargs)
|
||||||
os.unlink(path)
|
os.unlink(path)
|
||||||
else:
|
else:
|
||||||
@@ -1754,9 +1792,14 @@ def find(root, files, recursive=True):
|
|||||||
files = [files]
|
files = [files]
|
||||||
|
|
||||||
if recursive:
|
if recursive:
|
||||||
return _find_recursive(root, files)
|
tty.debug(f"Find (recursive): {root} {str(files)}")
|
||||||
|
result = _find_recursive(root, files)
|
||||||
else:
|
else:
|
||||||
return _find_non_recursive(root, files)
|
tty.debug(f"Find (not recursive): {root} {str(files)}")
|
||||||
|
result = _find_non_recursive(root, files)
|
||||||
|
|
||||||
|
tty.debug(f"Find complete: {root} {str(files)}")
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
@system_path_filter
|
@system_path_filter
|
||||||
@@ -2688,7 +2731,7 @@ def remove_directory_contents(dir):
|
|||||||
"""Remove all contents of a directory."""
|
"""Remove all contents of a directory."""
|
||||||
if os.path.exists(dir):
|
if os.path.exists(dir):
|
||||||
for entry in [os.path.join(dir, entry) for entry in os.listdir(dir)]:
|
for entry in [os.path.join(dir, entry) for entry in os.listdir(dir)]:
|
||||||
if os.path.isfile(entry) or os.path.islink(entry):
|
if os.path.isfile(entry) or islink(entry):
|
||||||
os.unlink(entry)
|
os.unlink(entry)
|
||||||
else:
|
else:
|
||||||
shutil.rmtree(entry)
|
shutil.rmtree(entry)
|
||||||
|
|||||||
@@ -2,77 +2,188 @@
|
|||||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
import errno
|
|
||||||
import os
|
import os
|
||||||
|
import re
|
||||||
import shutil
|
import shutil
|
||||||
|
import subprocess
|
||||||
import sys
|
import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
from os.path import exists, join
|
|
||||||
|
|
||||||
from llnl.util import lang
|
from llnl.util import lang, tty
|
||||||
|
|
||||||
|
from spack.error import SpackError
|
||||||
|
from spack.util.path import system_path_filter
|
||||||
|
|
||||||
if sys.platform == "win32":
|
if sys.platform == "win32":
|
||||||
from win32file import CreateHardLink
|
from win32file import CreateHardLink
|
||||||
|
|
||||||
|
is_windows = sys.platform == "win32"
|
||||||
|
|
||||||
def symlink(real_path, link_path):
|
|
||||||
"""
|
|
||||||
Create a symbolic link.
|
|
||||||
|
|
||||||
On Windows, use junctions if os.symlink fails.
|
def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not is_windows):
|
||||||
"""
|
"""
|
||||||
if sys.platform != "win32":
|
Create a link.
|
||||||
os.symlink(real_path, link_path)
|
|
||||||
elif _win32_can_symlink():
|
On non-Windows and Windows with System Administrator
|
||||||
# Windows requires target_is_directory=True when the target is a dir.
|
privileges this will be a normal symbolic link via
|
||||||
os.symlink(real_path, link_path, target_is_directory=os.path.isdir(real_path))
|
os.symlink.
|
||||||
else:
|
|
||||||
try:
|
On Windows without privileges the link will be a
|
||||||
# Try to use junctions
|
junction for a directory and a hardlink for a file.
|
||||||
_win32_junction(real_path, link_path)
|
On Windows the various link types are:
|
||||||
except OSError as e:
|
|
||||||
if e.errno == errno.EEXIST:
|
Symbolic Link: A link to a file or directory on the
|
||||||
# EEXIST error indicates that file we're trying to "link"
|
same or different volume (drive letter) or even to
|
||||||
# is already present, don't bother trying to copy which will also fail
|
a remote file or directory (using UNC in its path).
|
||||||
# just raise
|
Need System Administrator privileges to make these.
|
||||||
raise
|
|
||||||
|
Hard Link: A link to a file on the same volume (drive
|
||||||
|
letter) only. Every file (file's data) has at least 1
|
||||||
|
hard link (file's name). But when this method creates
|
||||||
|
a new hard link there will be 2. Deleting all hard
|
||||||
|
links effectively deletes the file. Don't need System
|
||||||
|
Administrator privileges.
|
||||||
|
|
||||||
|
Junction: A link to a directory on the same or different
|
||||||
|
volume (drive letter) but not to a remote directory. Don't
|
||||||
|
need System Administrator privileges.
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
source_path (str): The real file or directory that the link points to.
|
||||||
|
Must be absolute OR relative to the link.
|
||||||
|
link_path (str): The path where the link will exist.
|
||||||
|
allow_broken_symlinks (bool): On Linux or Mac, don't raise an exception if the source_path
|
||||||
|
doesn't exist. This will still raise an exception on Windows.
|
||||||
|
"""
|
||||||
|
source_path = os.path.normpath(source_path)
|
||||||
|
win_source_path = source_path
|
||||||
|
link_path = os.path.normpath(link_path)
|
||||||
|
|
||||||
|
# Never allow broken links on Windows.
|
||||||
|
if sys.platform == "win32" and allow_broken_symlinks:
|
||||||
|
raise ValueError("allow_broken_symlinks parameter cannot be True on Windows.")
|
||||||
|
|
||||||
|
if not allow_broken_symlinks:
|
||||||
|
# Perform basic checks to make sure symlinking will succeed
|
||||||
|
if os.path.lexists(link_path):
|
||||||
|
raise SymlinkError(f"Link path ({link_path}) already exists. Cannot create link.")
|
||||||
|
|
||||||
|
if not os.path.exists(source_path):
|
||||||
|
if os.path.isabs(source_path) and not allow_broken_symlinks:
|
||||||
|
# An absolute source path that does not exist will result in a broken link.
|
||||||
|
raise SymlinkError(
|
||||||
|
f"Source path ({source_path}) is absolute but does not exist. Resulting "
|
||||||
|
f"link would be broken so not making link."
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
# If all else fails, fall back to copying files
|
# os.symlink can create a link when the given source path is relative to
|
||||||
shutil.copyfile(real_path, link_path)
|
# the link path. Emulate this behavior and check to see if the source exists
|
||||||
|
# relative to the link path ahead of link creation to prevent broken
|
||||||
|
# links from being made.
|
||||||
|
link_parent_dir = os.path.dirname(link_path)
|
||||||
|
relative_path = os.path.join(link_parent_dir, source_path)
|
||||||
|
if os.path.exists(relative_path):
|
||||||
|
# In order to work on windows, the source path needs to be modified to be
|
||||||
|
# relative because hardlink/junction dont resolve relative paths the same
|
||||||
|
# way as os.symlink. This is ignored on other operating systems.
|
||||||
|
win_source_path = relative_path
|
||||||
|
elif not allow_broken_symlinks:
|
||||||
|
raise SymlinkError(
|
||||||
|
f"The source path ({source_path}) is not relative to the link path "
|
||||||
|
f"({link_path}). Resulting link would be broken so not making link."
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create the symlink
|
||||||
|
if sys.platform == "win32" and not _windows_can_symlink():
|
||||||
|
_windows_create_link(win_source_path, link_path)
|
||||||
|
else:
|
||||||
|
os.symlink(source_path, link_path, target_is_directory=os.path.isdir(source_path))
|
||||||
|
|
||||||
|
|
||||||
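A minimal usage sketch of the reworked interface (paths are hypothetical; on Windows without privileges the call would fall back to a junction or hard link):

.. code-block:: python

   from llnl.util.symlink import islink, readlink, symlink

   symlink("/opt/pkg/bin/tool", "/usr/local/bin/tool")
   assert islink("/usr/local/bin/tool")
   print(readlink("/usr/local/bin/tool"))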
def islink(path):
|
def islink(path: str) -> bool:
|
||||||
return os.path.islink(path) or _win32_is_junction(path)
|
"""Override os.islink to give correct answer for spack logic.
|
||||||
|
|
||||||
|
For Non-Windows: a link can be determined with the os.path.islink method.
|
||||||
|
Windows-only methods will return false for other operating systems.
|
||||||
|
|
||||||
|
For Windows: spack considers symlinks, hard links, and junctions to
|
||||||
|
all be links, so if any of those are True, return True.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
path (str): path to check if it is a link.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool - whether the path is any kind of link or not.
|
||||||
|
"""
|
||||||
|
return any([os.path.islink(path), _windows_is_junction(path), _windows_is_hardlink(path)])
|
||||||
|
|
||||||
|
|
||||||
# '_win32' functions based on
|
def _windows_is_hardlink(path: str) -> bool:
|
||||||
# https://github.com/Erotemic/ubelt/blob/master/ubelt/util_links.py
|
"""Determines if a path is a windows hard link. This is accomplished
|
||||||
def _win32_junction(path, link):
|
by looking at the number of links using os.stat. A non-hard-linked file
|
||||||
# junctions require absolute paths
|
will have a st_nlink value of 1, whereas a hard link will have a value
|
||||||
if not os.path.isabs(link):
|
larger than 1. Note that both the original and hard-linked file will
|
||||||
link = os.path.abspath(link)
|
return True because they share the same inode.
|
||||||
|
|
||||||
# os.symlink will fail if link exists, emulate the behavior here
|
Args:
|
||||||
if exists(link):
|
path (str): Windows path to check for a hard link
|
||||||
raise OSError(errno.EEXIST, "File exists: %s -> %s" % (link, path))
|
|
||||||
|
|
||||||
if not os.path.isabs(path):
|
Returns:
|
||||||
parent = os.path.join(link, os.pardir)
|
bool - Whether the path is a hard link or not.
|
||||||
path = os.path.join(parent, path)
|
"""
|
||||||
path = os.path.abspath(path)
|
if sys.platform != "win32" or os.path.islink(path) or not os.path.exists(path):
|
||||||
|
return False
|
||||||
|
|
||||||
CreateHardLink(link, path)
|
return os.stat(path).st_nlink > 1
|
||||||
|
|
||||||
|
|
||||||
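The ``st_nlink`` heuristic in isolation (a standalone sketch that works on any filesystem with hard links):

.. code-block:: python

   import os
   import tempfile

   with tempfile.TemporaryDirectory() as d:
       original = os.path.join(d, "a.txt")
       open(original, "w").close()
       assert os.stat(original).st_nlink == 1
       os.link(original, os.path.join(d, "b.txt"))
       # Both names now report two links to the shared inode.
       assert os.stat(original).st_nlink == 2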
|
def _windows_is_junction(path: str) -> bool:
|
||||||
|
"""Determines if a path is a windows junction. A junction can be
|
||||||
|
determined using a bitwise AND operation between the file's
|
||||||
|
attribute bitmask and the known junction bitmask (0x400).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
path (str): A non-file path
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool - whether the path is a junction or not.
|
||||||
|
"""
|
||||||
|
if sys.platform != "win32" or os.path.islink(path) or os.path.isfile(path):
|
||||||
|
return False
|
||||||
|
|
||||||
|
import ctypes.wintypes
|
||||||
|
|
||||||
|
get_file_attributes = ctypes.windll.kernel32.GetFileAttributesW # type: ignore[attr-defined]
|
||||||
|
get_file_attributes.argtypes = (ctypes.wintypes.LPWSTR,)
|
||||||
|
get_file_attributes.restype = ctypes.wintypes.DWORD
|
||||||
|
|
||||||
|
invalid_file_attributes = 0xFFFFFFFF
|
||||||
|
reparse_point = 0x400
|
||||||
|
file_attr = get_file_attributes(str(path))
|
||||||
|
|
||||||
|
if file_attr == invalid_file_attributes:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return file_attr & reparse_point > 0
|
||||||
|
|
||||||
|
|
||||||
@lang.memoized
|
@lang.memoized
|
||||||
def _win32_can_symlink():
|
def _windows_can_symlink() -> bool:
|
||||||
|
"""
|
||||||
|
Determines if windows is able to make a symlink depending on
|
||||||
|
the system configuration and the level of the user's permissions.
|
||||||
|
"""
|
||||||
|
if sys.platform != "win32":
|
||||||
|
tty.warn("windows_can_symlink method can't be used on non-Windows OS.")
|
||||||
|
return False
|
||||||
|
|
||||||
tempdir = tempfile.mkdtemp()
|
tempdir = tempfile.mkdtemp()
|
||||||
|
|
||||||
dpath = join(tempdir, "dpath")
|
dpath = os.path.join(tempdir, "dpath")
|
||||||
fpath = join(tempdir, "fpath.txt")
|
fpath = os.path.join(tempdir, "fpath.txt")
|
||||||
|
|
||||||
dlink = join(tempdir, "dlink")
|
dlink = os.path.join(tempdir, "dlink")
|
||||||
flink = join(tempdir, "flink.txt")
|
flink = os.path.join(tempdir, "flink.txt")
|
||||||
|
|
||||||
import llnl.util.filesystem as fs
|
import llnl.util.filesystem as fs
|
||||||
|
|
||||||
@@ -96,24 +207,136 @@ def _win32_can_symlink():
|
|||||||
return can_symlink_directories and can_symlink_files
|
return can_symlink_directories and can_symlink_files
|
||||||
|
|
||||||
|
|
||||||
def _win32_is_junction(path):
|
def _windows_create_link(source: str, link: str):
|
||||||
"""
|
"""
|
||||||
Determines if a path is a win32 junction
|
Attempts to create a Hard Link or Junction as an alternative
|
||||||
|
to a symbolic link. This is called when symbolic links cannot
|
||||||
|
be created.
|
||||||
"""
|
"""
|
||||||
if os.path.islink(path):
|
if sys.platform != "win32":
|
||||||
return False
|
raise SymlinkError("windows_create_link method can't be used on non-Windows OS.")
|
||||||
|
elif os.path.isdir(source):
|
||||||
|
_windows_create_junction(source=source, link=link)
|
||||||
|
elif os.path.isfile(source):
|
||||||
|
_windows_create_hard_link(path=source, link=link)
|
||||||
|
else:
|
||||||
|
raise SymlinkError(
|
||||||
|
f"Cannot create link from {source}. It is neither a file nor a directory."
|
||||||
|
)
|
||||||
|
|
||||||
if sys.platform == "win32":
|
|
||||||
import ctypes.wintypes
|
|
||||||
|
|
||||||
GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW
|
def _windows_create_junction(source: str, link: str):
|
||||||
GetFileAttributes.argtypes = (ctypes.wintypes.LPWSTR,)
|
"""Duly verify that the path and link are eligible to create a junction,
|
||||||
GetFileAttributes.restype = ctypes.wintypes.DWORD
|
then create the junction.
|
||||||
|
"""
|
||||||
|
if sys.platform != "win32":
|
||||||
|
raise SymlinkError("windows_create_junction method can't be used on non-Windows OS.")
|
||||||
|
elif not os.path.exists(source):
|
||||||
|
raise SymlinkError("Source path does not exist, cannot create a junction.")
|
||||||
|
elif os.path.lexists(link):
|
||||||
|
raise SymlinkError("Link path already exists, cannot create a junction.")
|
||||||
|
elif not os.path.isdir(source):
|
||||||
|
raise SymlinkError("Source path is not a directory, cannot create a junction.")
|
||||||
|
|
||||||
INVALID_FILE_ATTRIBUTES = 0xFFFFFFFF
|
import subprocess
|
||||||
FILE_ATTRIBUTE_REPARSE_POINT = 0x400
|
|
||||||
|
|
||||||
res = GetFileAttributes(path)
|
cmd = ["cmd", "/C", "mklink", "/J", link, source]
|
||||||
return res != INVALID_FILE_ATTRIBUTES and bool(res & FILE_ATTRIBUTE_REPARSE_POINT)
|
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
|
out, err = proc.communicate()
|
||||||
|
tty.debug(out.decode())
|
||||||
|
if proc.returncode != 0:
|
||||||
|
err = err.decode()
|
||||||
|
tty.error(err)
|
||||||
|
raise SymlinkError("Make junction command returned a non-zero return code.", err)
|
||||||
|
|
||||||
return False
|
|
||||||
|
def _windows_create_hard_link(path: str, link: str):
|
||||||
|
"""Duly verify that the path and link are eligible to create a hard
|
||||||
|
link, then create the hard link.
|
||||||
|
"""
|
||||||
|
if sys.platform != "win32":
|
||||||
|
raise SymlinkError("windows_create_hard_link method can't be used on non-Windows OS.")
|
||||||
|
elif not os.path.exists(path):
|
||||||
|
raise SymlinkError(f"File path {path} does not exist. Cannot create hard link.")
|
||||||
|
elif os.path.lexists(link):
|
||||||
|
raise SymlinkError(f"Link path ({link}) already exists. Cannot create hard link.")
|
||||||
|
elif not os.path.isfile(path):
|
||||||
|
raise SymlinkError(f"File path ({link}) is not a file. Cannot create hard link.")
|
||||||
|
else:
|
||||||
|
tty.debug(f"Creating hard link {link} pointing to {path}")
|
||||||
|
CreateHardLink(link, path)
|
||||||
|
|
||||||
|
|
||||||
|
def readlink(path: str):
|
||||||
|
"""Spack utility to override of os.readlink method to work cross platform"""
|
||||||
|
if _windows_is_hardlink(path):
|
||||||
|
return _windows_read_hard_link(path)
|
||||||
|
elif _windows_is_junction(path):
|
||||||
|
return _windows_read_junction(path)
|
||||||
|
else:
|
||||||
|
return os.readlink(path)
|
||||||
|
|
||||||
|
|
||||||
|
def _windows_read_hard_link(link: str) -> str:
|
||||||
|
"""Find all of the files that point to the same inode as the link"""
|
||||||
|
if sys.platform != "win32":
|
||||||
|
raise SymlinkError("Can't read hard link on non-Windows OS.")
|
||||||
|
link = os.path.abspath(link)
|
||||||
|
fsutil_cmd = ["fsutil", "hardlink", "list", link]
|
||||||
|
proc = subprocess.Popen(fsutil_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
|
||||||
|
out, err = proc.communicate()
|
||||||
|
if proc.returncode != 0:
|
||||||
|
raise SymlinkError(f"An error occurred while reading hard link: {err.decode()}")
|
||||||
|
|
||||||
|
# fsutil response does not include the drive name, so append it back to each linked file.
|
||||||
|
drive, link_tail = os.path.splitdrive(os.path.abspath(link))
|
||||||
|
links = set([os.path.join(drive, p) for p in out.decode().splitlines()])
|
||||||
|
links.remove(link)
|
||||||
|
if len(links) == 1:
|
||||||
|
return links.pop()
|
||||||
|
elif len(links) > 1:
|
||||||
|
# TODO: How best to handle the case where 3 or more paths point to a single inode?
|
||||||
|
raise SymlinkError(f"Found multiple paths pointing to the same inode {links}")
|
||||||
|
else:
|
||||||
|
raise SymlinkError("Cannot determine hard link source path.")
|
||||||
|
|
||||||
|
|
||||||
|
def _windows_read_junction(link: str):
|
||||||
|
"""Find the path that a junction points to."""
|
||||||
|
if sys.platform != "win32":
|
||||||
|
raise SymlinkError("Can't read junction on non-Windows OS.")
|
||||||
|
|
||||||
|
link = os.path.abspath(link)
|
||||||
|
link_basename = os.path.basename(link)
|
||||||
|
link_parent = os.path.dirname(link)
|
||||||
|
fsutil_cmd = ["dir", "/a:l", link_parent]
|
||||||
|
proc = subprocess.Popen(fsutil_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
|
||||||
|
out, err = proc.communicate()
|
||||||
|
if proc.returncode != 0:
|
||||||
|
raise SymlinkError(f"An error occurred while reading junction: {err.decode()}")
|
||||||
|
matches = re.search(rf"<JUNCTION>\s+{link_basename} \[(.*)]", out.decode())
|
||||||
|
if matches:
|
||||||
|
return matches.group(1)
|
||||||
|
else:
|
||||||
|
raise SymlinkError("Could not find junction path.")
|
||||||
|
|
||||||
|
|
||||||
|
@system_path_filter
|
||||||
|
def resolve_link_target_relative_to_the_link(link):
|
||||||
|
"""
|
||||||
|
os.path.isdir uses os.path.exists, which for links will check
|
||||||
|
the existence of the link target. If the link target is relative to
|
||||||
|
the link, we need to construct a pathname that is valid from
|
||||||
|
our cwd (which may not be the same as the link's directory)
|
||||||
|
"""
|
||||||
|
target = readlink(link)
|
||||||
|
if os.path.isabs(target):
|
||||||
|
return target
|
||||||
|
link_dir = os.path.dirname(os.path.abspath(link))
|
||||||
|
return os.path.join(link_dir, target)
|
||||||
|
|
||||||
|
|
||||||
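The failure mode the relocated helper guards against, illustrated with hypothetical paths:

.. code-block:: python

   # /tmp/a/link -> "target"   (a relative symlink)
   # readlink("/tmp/a/link") returns just "target", and testing
   # os.path.isdir("target") would resolve relative to the cwd.
   # resolve_link_target_relative_to_the_link("/tmp/a/link") instead
   # returns "/tmp/a/target", which is valid from any cwd.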
|
class SymlinkError(SpackError):
|
||||||
|
"""Exception class for errors raised while creating symlinks,
|
||||||
|
junctions and hard links
|
||||||
|
"""
|
||||||
|
|||||||
@@ -780,7 +780,7 @@ def __enter__(self):
|
|||||||
raise RuntimeError("file argument must be set by __init__ ")
|
raise RuntimeError("file argument must be set by __init__ ")
|
||||||
|
|
||||||
# Open both write and reading on logfile
|
# Open both write and reading on logfile
|
||||||
if type(self.logfile) == io.StringIO:
|
if isinstance(self.logfile, io.StringIO):
|
||||||
self._ioflag = True
|
self._ioflag = True
|
||||||
# cannot have two streams on tempfile, so we must make our own
|
# cannot have two streams on tempfile, so we must make our own
|
||||||
sys.stdout = self.logfile
|
sys.stdout = self.logfile
|
||||||
|
|||||||
@@ -9,7 +9,6 @@
|
|||||||
import io
|
import io
|
||||||
import itertools
|
import itertools
|
||||||
import json
|
import json
|
||||||
import multiprocessing.pool
|
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import shutil
|
import shutil
|
||||||
@@ -49,6 +48,7 @@
|
|||||||
import spack.util.gpg
|
import spack.util.gpg
|
||||||
import spack.util.spack_json as sjson
|
import spack.util.spack_json as sjson
|
||||||
import spack.util.spack_yaml as syaml
|
import spack.util.spack_yaml as syaml
|
||||||
|
import spack.util.timer as timer
|
||||||
import spack.util.url as url_util
|
import spack.util.url as url_util
|
||||||
import spack.util.web as web_util
|
import spack.util.web as web_util
|
||||||
from spack.caches import misc_cache_location
|
from spack.caches import misc_cache_location
|
||||||
@@ -876,32 +876,18 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
|
|||||||
db: A spack database used for adding specs and then writing the index.
|
db: A spack database used for adding specs and then writing the index.
|
||||||
temp_dir (str): Location to write index.json and hash for pushing
|
temp_dir (str): Location to write index.json and hash for pushing
|
||||||
concurrency (int): Number of parallel processes to use when fetching
|
concurrency (int): Number of parallel processes to use when fetching
|
||||||
|
|
||||||
Return:
|
|
||||||
None
|
|
||||||
"""
|
"""
|
||||||
|
for file in file_list:
|
||||||
|
contents = read_method(file)
|
||||||
|
# Need full spec.json name or this gets confused with index.json.
|
||||||
|
if file.endswith(".json.sig"):
|
||||||
|
specfile_json = Spec.extract_json_from_clearsig(contents)
|
||||||
|
fetched_spec = Spec.from_dict(specfile_json)
|
||||||
|
elif file.endswith(".json"):
|
||||||
|
fetched_spec = Spec.from_json(contents)
|
||||||
|
else:
|
||||||
|
continue
|
||||||
|
|
||||||
def _fetch_spec_from_mirror(spec_url):
|
|
||||||
spec_file_contents = read_method(spec_url)
|
|
||||||
|
|
||||||
if spec_file_contents:
|
|
||||||
# Need full spec.json name or this gets confused with index.json.
|
|
||||||
if spec_url.endswith(".json.sig"):
|
|
||||||
specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
|
|
||||||
return Spec.from_dict(specfile_json)
|
|
||||||
if spec_url.endswith(".json"):
|
|
||||||
return Spec.from_json(spec_file_contents)
|
|
||||||
|
|
||||||
tp = multiprocessing.pool.ThreadPool(processes=concurrency)
|
|
||||||
try:
|
|
||||||
fetched_specs = tp.map(
|
|
||||||
llnl.util.lang.star(_fetch_spec_from_mirror), [(f,) for f in file_list]
|
|
||||||
)
|
|
||||||
finally:
|
|
||||||
tp.terminate()
|
|
||||||
tp.join()
|
|
||||||
|
|
||||||
for fetched_spec in fetched_specs:
|
|
||||||
db.add(fetched_spec, None)
|
db.add(fetched_spec, None)
|
||||||
db.mark(fetched_spec, "in_buildcache", True)
|
db.mark(fetched_spec, "in_buildcache", True)
|
||||||
|
|
||||||
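Fetching the spec files is I/O-bound, so a thread pool is a natural fit here; ``llnl.util.lang.star`` simply adapts ``_fetch_spec_from_mirror`` so that ``map`` can unpack the single-element argument tuples.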
@@ -1813,10 +1799,11 @@ def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
|
|||||||
m.linkname = m.linkname[result.end() :]
|
m.linkname = m.linkname[result.end() :]
|
||||||
|
|
||||||
|
|
||||||
def extract_tarball(spec, download_result, unsigned=False, force=False):
|
def extract_tarball(spec, download_result, unsigned=False, force=False, timer=timer.NULL_TIMER):
|
||||||
"""
|
"""
|
||||||
extract binary tarball for given package into install area
|
extract binary tarball for given package into install area
|
||||||
"""
|
"""
|
||||||
|
timer.start("extract")
|
||||||
if os.path.exists(spec.prefix):
|
if os.path.exists(spec.prefix):
|
||||||
if force:
|
if force:
|
||||||
shutil.rmtree(spec.prefix)
|
shutil.rmtree(spec.prefix)
|
||||||
@@ -1896,7 +1883,9 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
|
|||||||
|
|
||||||
os.remove(tarfile_path)
|
os.remove(tarfile_path)
|
||||||
os.remove(specfile_path)
|
os.remove(specfile_path)
|
||||||
|
timer.stop("extract")
|
||||||
|
|
||||||
|
timer.start("relocate")
|
||||||
try:
|
try:
|
||||||
relocate_package(spec)
|
relocate_package(spec)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@@ -1917,6 +1906,7 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
|
|||||||
if os.path.exists(filename):
|
if os.path.exists(filename):
|
||||||
os.remove(filename)
|
os.remove(filename)
|
||||||
_delete_staged_downloads(download_result)
|
_delete_staged_downloads(download_result)
|
||||||
|
timer.stop("relocate")
|
||||||
|
|
||||||
|
|
||||||
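Defaulting the new parameter to ``timer.NULL_TIMER`` keeps existing call sites unchanged: the null timer is presumably a no-op object whose ``start`` and ``stop`` do nothing, so timing data is only collected when a caller passes a real timer.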
def _ensure_common_prefix(tar: tarfile.TarFile) -> str:
|
def _ensure_common_prefix(tar: tarfile.TarFile) -> str:
|
||||||
@@ -2383,22 +2373,12 @@ def __init__(self, all_architectures):
|
|||||||
|
|
||||||
self.possible_specs = specs
|
self.possible_specs = specs
|
||||||
|
|
||||||
def __call__(self, spec, **kwargs):
|
def __call__(self, spec: Spec, **kwargs):
|
||||||
"""
|
"""
|
||||||
Args:
|
Args:
|
||||||
spec (str): The spec being searched for in its string representation or hash.
|
spec: The spec being searched for
|
||||||
"""
|
"""
|
||||||
matches = []
|
return [s for s in self.possible_specs if s.satisfies(spec)]
|
||||||
if spec.startswith("/"):
|
|
||||||
# Matching a DAG hash
|
|
||||||
query_hash = spec.replace("/", "")
|
|
||||||
for candidate_spec in self.possible_specs:
|
|
||||||
if candidate_spec.dag_hash().startswith(query_hash):
|
|
||||||
matches.append(candidate_spec)
|
|
||||||
else:
|
|
||||||
# Matching a spec constraint
|
|
||||||
matches = [s for s in self.possible_specs if s.satisfies(spec)]
|
|
||||||
return matches
|
|
||||||
|
|
||||||
|
|
||||||
class FetchIndexError(Exception):
|
class FetchIndexError(Exception):
|
||||||
|
|||||||
@@ -124,9 +124,9 @@ def _read_and_sanitize_configuration() -> Dict[str, Any]:
|
|||||||
def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
|
def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
|
||||||
tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin")
|
tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin")
|
||||||
config_scopes: MutableSequence["spack.config.ConfigScope"] = [
|
config_scopes: MutableSequence["spack.config.ConfigScope"] = [
|
||||||
spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)
|
spack.config.InternalConfigScope("_builtin", spack.config.CONFIG_DEFAULTS)
|
||||||
]
|
]
|
||||||
configuration_paths = (spack.config.configuration_defaults_path, ("bootstrap", _config_path()))
|
configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
|
||||||
for name, path in configuration_paths:
|
for name, path in configuration_paths:
|
||||||
platform = spack.platforms.host().name
|
platform = spack.platforms.host().name
|
||||||
platform_scope = spack.config.ConfigScope(
|
platform_scope = spack.config.ConfigScope(
|
||||||
|
|||||||
@@ -476,15 +476,22 @@ def ensure_executables_in_path_or_raise(
|
|||||||
def _add_externals_if_missing() -> None:
|
def _add_externals_if_missing() -> None:
|
||||||
search_list = [
|
search_list = [
|
||||||
# clingo
|
# clingo
|
||||||
spack.repo.PATH.get_pkg_class("cmake"),
|
"cmake",
|
||||||
spack.repo.PATH.get_pkg_class("bison"),
|
"bison",
|
||||||
# GnuPG
|
# GnuPG
|
||||||
spack.repo.PATH.get_pkg_class("gawk"),
|
"gawk",
|
||||||
|
# develop deps
|
||||||
|
"git",
|
||||||
]
|
]
|
||||||
if IS_WINDOWS:
|
if IS_WINDOWS:
|
||||||
search_list.append(spack.repo.PATH.get_pkg_class("winbison"))
|
search_list.append("winbison")
|
||||||
detected_packages = spack.detection.by_executable(search_list)
|
externals = spack.detection.by_path(search_list)
|
||||||
spack.detection.update_configuration(detected_packages, scope="bootstrap")
|
# System git is typically deprecated, so mark as non-buildable to force it as external
|
||||||
|
non_buildable_externals = {k: externals.pop(k) for k in ("git",) if k in externals}
|
||||||
|
spack.detection.update_configuration(externals, scope="bootstrap", buildable=True)
|
||||||
|
spack.detection.update_configuration(
|
||||||
|
non_buildable_externals, scope="bootstrap", buildable=False
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
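Marking ``git`` as non-buildable in the ``bootstrap`` scope forces the concretizer to reuse the detected system git rather than attempting to build one mid-bootstrap.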
def clingo_root_spec() -> str:
|
def clingo_root_spec() -> str:
|
||||||
|
|||||||
@@ -15,14 +15,15 @@
|
|||||||
|
|
||||||
from llnl.util import tty
|
from llnl.util import tty
|
||||||
|
|
||||||
import spack.build_environment
|
|
||||||
import spack.environment
|
import spack.environment
|
||||||
import spack.tengine
|
import spack.tengine
|
||||||
|
import spack.util.cpus
|
||||||
import spack.util.executable
|
import spack.util.executable
|
||||||
from spack.environment import depfile
|
from spack.environment import depfile
|
||||||
|
|
||||||
from ._common import _root_spec
|
from ._common import _root_spec
|
||||||
from .config import root_path, spec_for_current_python, store_path
|
from .config import root_path, spec_for_current_python, store_path
|
||||||
|
from .core import _add_externals_if_missing
|
||||||
|
|
||||||
|
|
||||||
class BootstrapEnvironment(spack.environment.Environment):
|
class BootstrapEnvironment(spack.environment.Environment):
|
||||||
@@ -136,7 +137,7 @@ def _install_with_depfile(self) -> None:
|
|||||||
"-C",
|
"-C",
|
||||||
str(self.environment_root()),
|
str(self.environment_root()),
|
||||||
"-j",
|
"-j",
|
||||||
str(spack.build_environment.determine_number_of_jobs(parallel=True)),
|
str(spack.util.cpus.determine_number_of_jobs(parallel=True)),
|
||||||
**kwargs,
|
**kwargs,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -185,6 +186,7 @@ def pytest_root_spec() -> str:
|
|||||||
|
|
||||||
def ensure_environment_dependencies() -> None:
|
def ensure_environment_dependencies() -> None:
|
||||||
"""Ensure Spack dependencies from the bootstrap environment are installed and ready to use"""
|
"""Ensure Spack dependencies from the bootstrap environment are installed and ready to use"""
|
||||||
|
_add_externals_if_missing()
|
||||||
with BootstrapEnvironment() as env:
|
with BootstrapEnvironment() as env:
|
||||||
env.update_installations()
|
env.update_installations()
|
||||||
env.update_syspath_and_environ()
|
env.update_syspath_and_environ()
|
||||||
|
|||||||
@@ -68,7 +68,7 @@
|
|||||||
from spack.error import NoHeadersError, NoLibrariesError
|
from spack.error import NoHeadersError, NoLibrariesError
|
||||||
from spack.install_test import spack_install_test_log
|
from spack.install_test import spack_install_test_log
|
||||||
from spack.installer import InstallError
|
from spack.installer import InstallError
|
||||||
from spack.util.cpus import cpus_available
|
from spack.util.cpus import determine_number_of_jobs
|
||||||
from spack.util.environment import (
|
from spack.util.environment import (
|
||||||
SYSTEM_DIRS,
|
SYSTEM_DIRS,
|
||||||
EnvironmentModifications,
|
EnvironmentModifications,
|
||||||
@@ -537,39 +537,6 @@ def update_compiler_args_for_dep(dep):
|
|||||||
env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))
|
env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))
|
||||||
|
|
||||||
|
|
||||||
def determine_number_of_jobs(
|
|
||||||
parallel=False, command_line=None, config_default=None, max_cpus=None
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Packages that require sequential builds need 1 job. Otherwise we use the
|
|
||||||
number of jobs set on the command line. If not set, then we use the config
|
|
||||||
defaults (which is usually set through the builtin config scope), but we
|
|
||||||
cap to the number of CPUs available to avoid oversubscription.
|
|
||||||
|
|
||||||
Parameters:
|
|
||||||
parallel (bool or None): true when package supports parallel builds
|
|
||||||
command_line (int or None): command line override
|
|
||||||
config_default (int or None): config default number of jobs
|
|
||||||
max_cpus (int or None): maximum number of CPUs available. When None, this
|
|
||||||
value is automatically determined.
|
|
||||||
"""
|
|
||||||
if not parallel:
|
|
||||||
return 1
|
|
||||||
|
|
||||||
if command_line is None and "command_line" in spack.config.scopes():
|
|
||||||
command_line = spack.config.get("config:build_jobs", scope="command_line")
|
|
||||||
|
|
||||||
if command_line is not None:
|
|
||||||
return command_line
|
|
||||||
|
|
||||||
max_cpus = max_cpus or cpus_available()
|
|
||||||
|
|
||||||
# in some rare cases _builtin config may not be set, so default to max 16
|
|
||||||
config_default = config_default or spack.config.get("config:build_jobs", 16)
|
|
||||||
|
|
||||||
return min(max_cpus, config_default)
|
|
||||||
|
|
||||||
|
|
||||||
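Callers now reach the helper through its new home in ``spack.util.cpus`` (a sketch, assuming the relocated function keeps the keyword interface shown above):

.. code-block:: python

   import spack.util.cpus
   from spack.util.executable import Executable

   jobs = spack.util.cpus.determine_number_of_jobs(parallel=True)
   make = Executable("make")
   make("-j", str(jobs))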
def set_module_variables_for_package(pkg):
|
def set_module_variables_for_package(pkg):
|
||||||
"""Populate the Python module of a package with some useful global names.
|
"""Populate the Python module of a package with some useful global names.
|
||||||
This makes things easier for package writers.
|
This makes things easier for package writers.
|
||||||
@@ -1027,7 +994,7 @@ def get_cmake_prefix_path(pkg):
|
|||||||
|
|
||||||
|
|
```diff
 def _setup_pkg_and_run(
-    serialized_pkg, function, kwargs, child_pipe, input_multiprocess_fd, jsfd1, jsfd2
+    serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
 ):
     context = kwargs.get("context", "build")

@@ -1048,12 +1015,12 @@ def _setup_pkg_and_run(
                 pkg, dirty=kwargs.get("dirty", False), context=context
             )
         return_value = function(pkg, kwargs)
-        child_pipe.send(return_value)
+        write_pipe.send(return_value)

     except StopPhase as e:
         # Do not create a full ChildError from this, it's not an error
         # it's a control statement.
-        child_pipe.send(e)
+        write_pipe.send(e)
     except BaseException:
         # catch ANYTHING that goes wrong in the child process
         exc_type, exc, tb = sys.exc_info()
@@ -1102,10 +1069,10 @@ def _setup_pkg_and_run(
             context,
             package_context,
         )
-        child_pipe.send(ce)
+        write_pipe.send(ce)

     finally:
-        child_pipe.close()
+        write_pipe.close()
         if input_multiprocess_fd is not None:
             input_multiprocess_fd.close()

@@ -1149,7 +1116,7 @@ def child_fun():
     For more information on `multiprocessing` child process creation
     mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
     """
-    parent_pipe, child_pipe = multiprocessing.Pipe()
+    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
     input_multiprocess_fd = None
     jobserver_fd1 = None
     jobserver_fd2 = None
@@ -1174,7 +1141,7 @@ def child_fun():
                 serialized_pkg,
                 function,
                 kwargs,
-                child_pipe,
+                write_pipe,
                 input_multiprocess_fd,
                 jobserver_fd1,
                 jobserver_fd2,
@@ -1183,6 +1150,12 @@ def child_fun():

         p.start()

+        # We close the writable end of the pipe now to be sure that p is the
+        # only process which owns a handle for it. This ensures that when p
+        # closes its handle for the writable end, read_pipe.recv() will
+        # promptly report the readable end as being ready.
+        write_pipe.close()
+
     except InstallError as e:
         e.pkg = pkg
         raise
@@ -1192,7 +1165,16 @@ def child_fun():
         if input_multiprocess_fd is not None:
             input_multiprocess_fd.close()

-    child_result = parent_pipe.recv()
+    def exitcode_msg(p):
+        typ = "exit" if p.exitcode >= 0 else "signal"
+        return f"{typ} {abs(p.exitcode)}"
+
+    try:
+        child_result = read_pipe.recv()
+    except EOFError:
+        p.join()
+        raise InstallError(f"The process has stopped unexpectedly ({exitcode_msg(p)})")
+
     p.join()

     # If returns a StopPhase, raise it
@@ -1212,6 +1194,10 @@ def child_fun():
         child_result.print_context()
         raise child_result

+    # Fallback. Usually caught beforehand in EOFError above.
+    if p.exitcode != 0:
+        raise InstallError(f"The process failed unexpectedly ({exitcode_msg(p)})")
+
     return child_result
```
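The switch to a one-way pipe above is a general `multiprocessing` pattern worth seeing in isolation: once the parent closes its own handle on the writable end, a child that dies before sending anything makes `recv()` raise `EOFError` instead of blocking forever. A minimal, self-contained sketch of that behavior (all names hypothetical):

```python
import multiprocessing


def child(write_pipe):
    # Simulate a child that crashes before sending a result.
    raise SystemExit(1)


if __name__ == "__main__":
    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
    p = multiprocessing.Process(target=child, args=(write_pipe,))
    p.start()
    # Close the parent's handle on the writable end: once the child exits,
    # no process holds it open, so recv() raises EOFError promptly.
    write_pipe.close()
    try:
        result = read_pipe.recv()
    except EOFError:
        p.join()
        print(f"child died without sending a result (exitcode {p.exitcode})")
```

With a duplex pipe, or with the parent's write handle left open, the same `recv()` would hang on a crashed child, which is exactly the failure mode the hunk removes.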
```diff
@@ -55,7 +55,8 @@ def flags_to_build_system_args(self, flags):
         setattr(self, "configure_flag_args", [])
         for flag, values in flags.items():
             if values:
-                values_str = "{0}={1}".format(flag.upper(), " ".join(values))
+                var_name = "LIBS" if flag == "ldlibs" else flag.upper()
+                values_str = "{0}={1}".format(var_name, " ".join(values))
                 self.configure_flag_args.append(values_str)
         # Spack's fflags are meant for both F77 and FC, therefore we
         # additionaly set FCFLAGS if required.
```
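The point of the remapping: `LDLIBS` is a make variable, not an autotools configure variable; linked libraries belong in `LIBS`. A toy rendition of the loop above, with a made-up `flags` dict, shows the effect:

```python
# Hypothetical input; in Spack this comes from the spec's compiler flags.
flags = {"cflags": ["-O2"], "ldlibs": ["-lpthread", "-lm"]}

configure_flag_args = []
for flag, values in flags.items():
    if values:
        # ldlibs is spelled LIBS for configure; everything else is uppercased.
        var_name = "LIBS" if flag == "ldlibs" else flag.upper()
        configure_flag_args.append("{0}={1}".format(var_name, " ".join(values)))

print(configure_flag_args)  # ['CFLAGS=-O2', 'LIBS=-lpthread -lm']
```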
```diff
@@ -274,7 +274,6 @@ def std_args(pkg, generator=None):
             generator,
             define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
             define("CMAKE_BUILD_TYPE", build_type),
-            define("BUILD_TESTING", pkg.run_tests),
         ]

         # CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
@@ -451,7 +450,6 @@ def cmake_args(self):

         * CMAKE_INSTALL_PREFIX
         * CMAKE_BUILD_TYPE
-        * BUILD_TESTING

         which will be set automatically.
         """
```
```diff
@@ -154,7 +154,7 @@ def cuda_flags(arch_list):
     conflicts("%pgi@:15.3,15.5:", when="+cuda ^cuda@7.5 target=x86_64:")
     conflicts("%pgi@:16.2,16.0:16.3", when="+cuda ^cuda@8 target=x86_64:")
     conflicts("%pgi@:15,18:", when="+cuda ^cuda@9.0:9.1 target=x86_64:")
-    conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10 target=x86_64:")
+    conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10.0 target=x86_64:")
     conflicts("%pgi@:17,20:", when="+cuda ^cuda@10.1.105:10.2.89 target=x86_64:")
     conflicts("%pgi@:17,21:", when="+cuda ^cuda@11.0.2:11.1.0 target=x86_64:")
     conflicts("%clang@:3.4", when="+cuda ^cuda@:7.5 target=x86_64:")
```
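The one-character range tweak matters because Spack version ranges are inclusive of sub-versions: `@:10` matches any 10.x release, while `@:10.0` stops at the 10.0 series, so `cuda@10.1.105` is now handled only by the dedicated 10.1:10.2 conflict line. A quick check of that semantics, assuming a Spack checkout on `PYTHONPATH`:

```python
from spack.spec import Spec

# ":10" covers all of 10.x, ":10.0" ends at the 10.0 series.
print(Spec("cuda@10.1.105").satisfies("cuda@9.2.88:10"))    # True
print(Spec("cuda@10.1.105").satisfies("cuda@9.2.88:10.0"))  # False
```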
```diff
@@ -95,7 +95,7 @@ def makefile_root(self):
         return self.stage.source_path

     @property
-    def nmakefile_name(self):
+    def makefile_name(self):
         """Name of the current makefile. This is currently an empty value.
         If a project defines this value, it will be used with the /f argument
         to provide nmake an explicit makefile. This is usefule in scenarios where
@@ -126,8 +126,8 @@ def build(self, pkg, spec, prefix):
         """Run "nmake" on the build targets specified by the builder."""
         opts = self.std_nmake_args
         opts += self.nmake_args()
-        if self.nmakefile_name:
-            opts.append("/f {}".format(self.nmakefile_name))
+        if self.makefile_name:
+            opts.append("/F{}".format(self.makefile_name))
         with fs.working_dir(self.build_directory):
             inspect.getmodule(self.pkg).nmake(
                 *opts, *self.build_targets, ignore_quotes=self.ignore_quotes
@@ -139,8 +139,8 @@ def install(self, pkg, spec, prefix):
         opts = self.std_nmake_args
         opts += self.nmake_args()
         opts += self.nmake_install_args()
-        if self.nmakefile_name:
-            opts.append("/f {}".format(self.nmakefile_name))
+        if self.makefile_name:
+            opts.append("/F{}".format(self.makefile_name))
         opts.append(self.define("PREFIX", prefix))
         with fs.working_dir(self.build_directory):
             inspect.getmodule(self.pkg).nmake(
```
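Besides the `nmakefile_name` to `makefile_name` rename, note the switch from `"/f {}"` to `"/F{}"`: each entry in `opts` becomes a single argv token, and a token with an embedded space (`/f build.mak`) is not a well-formed nmake switch, whereas `/Fbuild.mak` is. A toy check of the strings produced (the makefile name is hypothetical):

```python
makefile_name = "build.mak"  # hypothetical project makefile

old_token = "/f {}".format(makefile_name)
new_token = "/F{}".format(makefile_name)

print([old_token])  # ['/f build.mak']  one token containing a space
print([new_token])  # ['/Fbuild.mak']   a single well-formed switch
```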
```diff
@@ -300,8 +300,8 @@ def get_external_python_for_prefix(self):
         if python_externals_configured:
             return python_externals_configured[0]

-        python_externals_detection = spack.detection.by_executable(
-            [spack.repo.PATH.get_pkg_class("python")], path_hints=[self.spec.external_path]
+        python_externals_detection = spack.detection.by_path(
+            ["python"], path_hints=[self.spec.external_path]
         )

         python_externals_detected = [
```
```diff
@@ -10,9 +10,10 @@
 import llnl.util.tty as tty

 import spack.builder
-from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
+from spack.build_environment import SPACK_NO_PARALLEL_MAKE
 from spack.directives import build_system, extends, maintainers
 from spack.package_base import PackageBase
+from spack.util.cpus import determine_number_of_jobs
 from spack.util.environment import env_flag
 from spack.util.executable import Executable, ProcessError

@@ -92,7 +93,7 @@ def install(self, pkg, spec, prefix):
             "--copy",
             "-i",
             "-j",
-            str(determine_number_of_jobs(parallel)),
+            str(determine_number_of_jobs(parallel=parallel)),
             "--",
             os.getcwd(),
         ]
```
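The helper now lives in `spack.util.cpus` and is called with `parallel` by keyword, which suggests the relocated function takes keyword-only arguments. A hedged sketch of what such a signature could look like (this is an illustration, not the real implementation):

```python
import os


def determine_number_of_jobs(*, parallel: bool = False, max_cpus: int = os.cpu_count() or 1) -> int:
    """Sketch of a keyword-only job-count helper: serial unless parallel is requested."""
    if not parallel:
        return 1
    return max_cpus


print(determine_number_of_jobs(parallel=True))
```

Making the parameters keyword-only means call sites like the one above stay readable and cannot silently swap positional arguments.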
```diff
@@ -20,9 +20,9 @@


 def misc_cache_location():
-    """The ``misc_cache`` is Spack's cache for small data.
+    """The ``MISC_CACHE`` is Spack's cache for small data.

-    Currently the ``misc_cache`` stores indexes for virtual dependency
+    Currently the ``MISC_CACHE`` stores indexes for virtual dependency
     providers and for which packages provide which tags.
     """
     path = spack.config.get("config:misc_cache", spack.paths.default_misc_cache_path)
@@ -35,7 +35,7 @@ def _misc_cache():


 #: Spack's cache for small data
-misc_cache: Union[
+MISC_CACHE: Union[
     spack.util.file_cache.FileCache, llnl.util.lang.Singleton
 ] = llnl.util.lang.Singleton(_misc_cache)

@@ -91,6 +91,6 @@ def symlink(self, mirror_ref):


 #: Spack's local cache for downloaded source archives
-fetch_cache: Union[
+FETCH_CACHE: Union[
     spack.fetch_strategy.FsCache, llnl.util.lang.Singleton
 ] = llnl.util.lang.Singleton(_fetch_cache)
```
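The rename follows the convention of uppercasing module-level globals; the behavior is unchanged because `llnl.util.lang.Singleton` wraps a factory so the cache is only built on first use. A minimal stand-in for that lazy-singleton idea (not the llnl implementation):

```python
class LazySingleton:
    """Defer construction until the wrapped object is first used."""

    def __init__(self, factory):
        self._factory = factory
        self._instance = None

    def __getattr__(self, name):
        # Only called for attributes not found on this wrapper itself.
        if self._instance is None:
            self._instance = self._factory()
        return getattr(self._instance, name)


def _make_cache():
    print("building cache...")  # runs once, on first access
    return {"providers": {}}


MISC_CACHE = LazySingleton(_make_cache)
MISC_CACHE.keys()  # triggers the factory; later accesses reuse the instance
```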
```diff
@@ -342,9 +342,9 @@ def iter_groups(specs, indent, all_headers):
             print()

         header = "%s{%s} / %s{%s}" % (
-            spack.spec.architecture_color,
+            spack.spec.ARCHITECTURE_COLOR,
             architecture if architecture else "no arch",
-            spack.spec.compiler_color,
+            spack.spec.COMPILER_COLOR,
             f"{compiler.display_str}" if compiler else "no compiler",
         )
```
```diff
@@ -69,11 +69,10 @@

 def _add_scope_option(parser):
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar
     parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         help="configuration scope to read/modify",
     )

@@ -170,7 +169,7 @@ def _reset(args):
     if not ok_to_continue:
         raise RuntimeError("Aborting")

-    for scope in spack.config.config.file_scopes:
+    for scope in spack.config.CONFIG.file_scopes:
         # The default scope should stay untouched
         if scope.name == "defaults":
             continue
@@ -187,7 +186,7 @@ def _reset(args):
     if os.path.exists(bootstrap_yaml):
         shutil.move(bootstrap_yaml, backup_file)

-    spack.config.config.clear_caches()
+    spack.config.CONFIG.clear_caches()


 def _root(args):
```
```diff
@@ -20,6 +20,7 @@
 import spack.cmd.common.arguments as arguments
 import spack.config
 import spack.environment as ev
+import spack.error
 import spack.mirror
 import spack.relocate
 import spack.repo
@@ -78,6 +79,11 @@ def setup_parser(subparser: argparse.ArgumentParser):
         "Alternatively, one can decide to build a cache for only the package or only the "
         "dependencies",
     )
+    push.add_argument(
+        "--fail-fast",
+        action="store_true",
+        help="stop pushing on first failure (default is best effort)",
+    )
     arguments.add_common_arguments(push, ["specs"])
     push.set_defaults(func=push_fn)

@@ -149,12 +155,11 @@ def setup_parser(subparser: argparse.ArgumentParser):

     # used to construct scope arguments below
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     check.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope containing mirrors to check",
     )
@@ -297,6 +302,7 @@ def push_fn(args):
     tty.info(f"Selected {len(specs)} specs to push to {url}")

     skipped = []
+    failed = []

     # tty printing
     color = clr.get_color_when()
@@ -327,11 +333,17 @@ def push_fn(args):
         except bindist.NoOverwriteException:
             skipped.append(format_spec(spec))

+        # Catch any other exception unless the fail fast option is set
+        except Exception as e:
+            if args.fail_fast or isinstance(e, (bindist.PickKeyException, bindist.NoKeyException)):
+                raise
+            failed.append((format_spec(spec), e))
+
     if skipped:
         if len(specs) == 1:
             tty.info("The spec is already in the buildcache. Use --force to overwrite it.")
         elif len(skipped) == len(specs):
-            tty.info("All specs are already in the buildcache. Use --force to overwite them.")
+            tty.info("All specs are already in the buildcache. Use --force to overwrite them.")
         else:
             tty.info(
                 "The following {} specs were skipped as they already exist in the buildcache:\n"
@@ -341,6 +353,17 @@ def push_fn(args):
                 )
             )

+    if failed:
+        if len(failed) == 1:
+            raise failed[0][1]
+
+        raise spack.error.SpackError(
+            f"The following {len(failed)} errors occurred while pushing specs to the buildcache",
+            "\n".join(
+                elide_list([f" {spec}: {e.__class__.__name__}: {e}" for spec, e in failed], 5)
+            ),
+        )
+

 def install_fn(args):
     """install from a binary package"""
```
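The `--fail-fast` wiring above follows a common best-effort pattern: keep iterating, collect `(item, exception)` pairs, re-raise a lone failure unchanged so its type survives, and aggregate multiple failures at the end. A stripped-down sketch of that control flow (push function and error type hypothetical):

```python
def push_all(specs, push_one, fail_fast=False):
    failed = []
    for spec in specs:
        try:
            push_one(spec)
        except Exception as e:
            if fail_fast:
                raise
            failed.append((spec, e))

    if failed:
        if len(failed) == 1:
            # A single failure is re-raised as-is, preserving its type.
            raise failed[0][1]
        summary = "\n".join(f"  {s}: {e.__class__.__name__}: {e}" for s, e in failed)
        raise RuntimeError(f"{len(failed)} errors occurred while pushing:\n{summary}")
```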
```diff
@@ -19,6 +19,7 @@
 import spack.hash_types as ht
 import spack.mirror
 import spack.util.gpg as gpg_util
+import spack.util.timer as timer
 import spack.util.url as url_util
 import spack.util.web as web_util

@@ -253,6 +254,8 @@ def ci_rebuild(args):
     check a single spec against the remote mirror, and rebuild it from source if the mirror does
     not contain the hash
     """
+    rebuild_timer = timer.Timer()
+
     env = spack.cmd.require_active_env(cmd_name="ci rebuild")

     # Make sure the environment is "gitlab-enabled", or else there's nothing
@@ -736,6 +739,14 @@ def ci_rebuild(args):

         print(reproduce_msg)

+    rebuild_timer.stop()
+    try:
+        with open("install_timers.json", "w") as timelog:
+            extra_attributes = {"name": ".ci-rebuild"}
+            rebuild_timer.write_json(timelog, extra_attributes=extra_attributes)
+    except Exception as e:
+        tty.debug(str(e))
+
     # Tie job success/failure to the success/failure of building the spec
     return install_exit_code
```
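Two details of the timing hunk are worth noting: the timer spans the whole `ci_rebuild` call, and the JSON write is wrapped so a logging failure can never fail the CI job itself. A rough standard-library equivalent of the same idea (file name kept from the hunk, record structure hypothetical):

```python
import json
import time

start = time.monotonic()
# ... perform the rebuild ...
elapsed = time.monotonic() - start

try:
    with open("install_timers.json", "w") as timelog:
        json.dump({"name": ".ci-rebuild", "total": elapsed}, timelog)
except Exception as e:
    # Best effort: recording timings must not change the job's exit status.
    print(f"could not write timer log: {e}")
```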
```diff
@@ -118,7 +118,7 @@ def clean(parser, args):

     if args.downloads:
         tty.msg("Removing cached downloads")
-        spack.caches.fetch_cache.destroy()
+        spack.caches.FETCH_CACHE.destroy()

     if args.failures:
         tty.msg("Removing install failure marks")
@@ -126,7 +126,7 @@ def clean(parser, args):

     if args.misc_cache:
         tty.msg("Removing cached information on repositories")
-        spack.caches.misc_cache.destroy()
+        spack.caches.MISC_CACHE.destroy()

     if args.python_cache:
         tty.msg("Removing python cache files")
```
```diff
@@ -812,6 +812,9 @@ def bash(args: Namespace, out: IO) -> None:
     parser = spack.main.make_argument_parser()
     spack.main.add_all_commands(parser)

+    aliases = ";".join(f"{key}:{val}" for key, val in spack.main.aliases.items())
+    out.write(f'SPACK_ALIASES="{aliases}"\n\n')
+
     writer = BashCompletionWriter(parser.prog, out, args.aliases)
     writer.write(parser)
```
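The emitted `SPACK_ALIASES` variable is a simple `key:value` list joined with semicolons, which a completion script can split back apart. A quick round trip of that encoding (the alias table is hypothetical, and the scheme assumes neither names nor targets contain `:` or `;`):

```python
aliases = {"rm": "remove", "ls": "list"}  # hypothetical alias table

encoded = ";".join(f"{key}:{val}" for key, val in aliases.items())
print(encoded)  # rm:remove;ls:list

# split(":", 1) keeps any further colons inside the value
decoded = dict(entry.split(":", 1) for entry in encoded.split(";"))
assert decoded == aliases
```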
```diff
@@ -24,7 +24,6 @@ def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="compiler_command")

     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     # Find
     find_parser = sp.add_parser(
@@ -36,7 +35,7 @@ def setup_parser(subparser):
     find_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope("compilers"),
         help="configuration scope to modify",
     )
@@ -50,7 +49,7 @@ def setup_parser(subparser):
     remove_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=None,
         help="configuration scope to modify",
     )
@@ -60,7 +59,7 @@ def setup_parser(subparser):
     list_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )
@@ -71,7 +70,7 @@ def setup_parser(subparser):
     info_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )
@@ -93,7 +92,7 @@ def compiler_find(args):
     n = len(new_compilers)
     s = "s" if n > 1 else ""

-    config = spack.config.config
+    config = spack.config.CONFIG
     filename = config.get_config_filename(args.scope, "compilers")
     tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
     colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)
@@ -186,7 +185,7 @@ def compiler_list(args):
             os_str = os
             if target:
                 os_str += "-%s" % target
-            cname = "%s{%s} %s" % (spack.spec.compiler_color, name, os_str)
+            cname = "%s{%s} %s" % (spack.spec.COMPILER_COLOR, name, os_str)
             tty.hline(colorize(cname), char="-")
             colify(reversed(sorted(c.spec.display_str for c in compilers)))
```
```diff
@@ -13,12 +13,11 @@

 def setup_parser(subparser):
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     subparser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         help="configuration scope to read/modify",
     )
```
```diff
@@ -27,13 +27,12 @@

 def setup_parser(subparser):
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     # User can only choose one
     subparser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         help="configuration scope to read/modify",
     )

@@ -45,7 +44,7 @@ def setup_parser(subparser):
         help="configuration section to print\n\noptions: %(choices)s",
         nargs="?",
         metavar="section",
-        choices=spack.config.section_schemas,
+        choices=spack.config.SECTION_SCHEMAS,
     )

     blame_parser = sp.add_parser(
@@ -55,7 +54,7 @@ def setup_parser(subparser):
         "section",
         help="configuration section to print\n\noptions: %(choices)s",
         metavar="section",
-        choices=spack.config.section_schemas,
+        choices=spack.config.SECTION_SCHEMAS,
     )

     edit_parser = sp.add_parser("edit", help="edit configuration file")
@@ -64,7 +63,7 @@ def setup_parser(subparser):
         help="configuration section to edit\n\noptions: %(choices)s",
         metavar="section",
         nargs="?",
-        choices=spack.config.section_schemas,
+        choices=spack.config.SECTION_SCHEMAS,
     )
     edit_parser.add_argument(
         "--print-file", action="store_true", help="print the file name that would be edited"
@@ -146,10 +145,10 @@ def config_get(args):
     scope, section = _get_scope_and_section(args)

     if section is not None:
-        spack.config.config.print_section(section)
+        spack.config.CONFIG.print_section(section)

     elif scope and scope.startswith("env:"):
-        config_file = spack.config.config.get_config_filename(scope, section)
+        config_file = spack.config.CONFIG.get_config_filename(scope, section)
         if os.path.exists(config_file):
             with open(config_file) as f:
                 print(f.read())
@@ -162,7 +161,7 @@ def config_get(args):

 def config_blame(args):
     """Print out line-by-line blame of merged YAML."""
-    spack.config.config.print_section(args.section, blame=True)
+    spack.config.CONFIG.print_section(args.section, blame=True)


 def config_edit(args):
@@ -181,7 +180,7 @@ def config_edit(args):
     scope, section = _get_scope_and_section(args)
     if not scope and not section:
         tty.die("`spack config edit` requires a section argument or an active environment.")
-    config_file = spack.config.config.get_config_filename(scope, section)
+    config_file = spack.config.CONFIG.get_config_filename(scope, section)

     if args.print_file:
         print(config_file)
@@ -194,7 +193,7 @@ def config_list(args):

     Used primarily for shell tab completion scripts.
     """
-    print(" ".join(list(spack.config.section_schemas)))
+    print(" ".join(list(spack.config.SECTION_SCHEMAS)))


 def config_add(args):
@@ -251,19 +250,19 @@ def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):

 def config_update(args):
     # Read the configuration files
-    spack.config.config.get_config(args.section, scope=args.scope)
+    spack.config.CONFIG.get_config(args.section, scope=args.scope)
     updates: List[spack.config.ConfigScope] = list(
         filter(
             lambda s: not isinstance(
                 s, (spack.config.InternalConfigScope, spack.config.ImmutableConfigScope)
             ),
-            spack.config.config.format_updates[args.section],
+            spack.config.CONFIG.format_updates[args.section],
         )
     )

     cannot_overwrite, skip_system_scope = [], False
     for scope in updates:
-        cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
+        cfg_file = spack.config.CONFIG.get_config_filename(scope.name, args.section)
         can_be_updated = _can_update_config_file(scope, cfg_file)
         if not can_be_updated:
             if scope.name == "system":
@@ -302,7 +301,7 @@ def config_update(args):
         " the latest schema format:\n\n"
     )
     for scope in updates:
-        cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
+        cfg_file = spack.config.CONFIG.get_config_filename(scope.name, args.section)
         msg += "\t[scope={0}, file={1}]\n".format(scope.name, cfg_file)
     msg += (
         "\nIf the configuration files are updated, versions of Spack "
@@ -325,7 +324,7 @@ def config_update(args):
     # Make a backup copy and rewrite the file
     bkp_file = cfg_file + ".bkp"
     shutil.copy(cfg_file, bkp_file)
-    spack.config.config.update_config(args.section, data, scope=scope.name, force=True)
+    spack.config.CONFIG.update_config(args.section, data, scope=scope.name, force=True)
     tty.msg(f'File "{cfg_file}" update [backup={bkp_file}]')


@@ -337,13 +336,13 @@ def _can_revert_update(scope_dir, cfg_file, bkp_file):


 def config_revert(args):
-    scopes = [args.scope] if args.scope else [x.name for x in spack.config.config.file_scopes]
+    scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.file_scopes]

     # Search for backup files in the configuration scopes
     Entry = collections.namedtuple("Entry", ["scope", "cfg", "bkp"])
     to_be_restored, cannot_overwrite = [], []
     for scope in scopes:
-        cfg_file = spack.config.config.get_config_filename(scope, args.section)
+        cfg_file = spack.config.CONFIG.get_config_filename(scope, args.section)
         bkp_file = cfg_file + ".bkp"

         # If the backup files doesn't exist move to the next scope
@@ -457,7 +456,7 @@ def config_prefer_upstream(args):
     existing = spack.config.get("packages", scope=scope)
     new = spack.config.merge_yaml(existing, pkgs)
     spack.config.set("packages", new, scope)
-    config_file = spack.config.config.get_config_filename(scope, section)
+    config_file = spack.config.CONFIG.get_config_filename(scope, section)

     tty.msg("Updated config at {0}".format(config_file))
```
```diff
@@ -239,6 +239,13 @@ def env_deactivate_setup_parser(subparser):
         const="bat",
         help="print bat commands to activate the environment",
     )
+    shells.add_argument(
+        "--pwsh",
+        action="store_const",
+        dest="shell",
+        const="pwsh",
+        help="print pwsh commands to activate the environment",
+    )


 def env_deactivate(args):
```
```diff
@@ -6,6 +6,7 @@
 import errno
 import os
 import sys
+from typing import List, Optional

 import llnl.util.tty as tty
 import llnl.util.tty.colify as colify
@@ -13,6 +14,7 @@
 import spack
 import spack.cmd
 import spack.cmd.common.arguments
+import spack.config
 import spack.cray_manifest as cray_manifest
 import spack.detection
 import spack.error
@@ -27,7 +29,6 @@ def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="external_command")

     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     find_parser = sp.add_parser("find", help="add external packages to packages.yaml")
     find_parser.add_argument(
@@ -47,14 +48,14 @@ def setup_parser(subparser):
     find_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope("packages"),
         help="configuration scope to modify",
     )
     find_parser.add_argument(
         "--all", action="store_true", help="search for all packages that Spack knows about"
     )
-    spack.cmd.common.arguments.add_common_arguments(find_parser, ["tags"])
+    spack.cmd.common.arguments.add_common_arguments(find_parser, ["tags", "jobs"])
     find_parser.add_argument("packages", nargs=argparse.REMAINDER)
     find_parser.epilog = (
         'The search is by default on packages tagged with the "build-tools" or '
@@ -120,52 +121,29 @@ def external_find(args):
         else:
             tty.warn("Unable to read manifest, unexpected error: {0}".format(str(e)), skip_msg)

-    # If the user didn't specify anything, search for build tools by default
-    if not args.tags and not args.all and not args.packages:
-        args.tags = ["core-packages", "build-tools"]
+    # Outside the Cray manifest, the search is done by tag for performance reasons,
+    # since tags are cached.

     # If the user specified both --all and --tag, then --all has precedence
-    if args.all and args.tags:
-        args.tags = []
-
-    # Construct the list of possible packages to be detected
-    pkg_cls_to_check = []
-
-    # Add the packages that have been required explicitly
-    if args.packages:
-        pkg_cls_to_check = [spack.repo.PATH.get_pkg_class(pkg) for pkg in args.packages]
-        if args.tags:
-            allowed = set(spack.repo.PATH.packages_with_tags(*args.tags))
-            pkg_cls_to_check = [x for x in pkg_cls_to_check if x.name in allowed]
-
-    if args.tags and not pkg_cls_to_check:
-        # If we arrived here we didn't have any explicit package passed
-        # as argument, which means to search all packages.
-        # Since tags are cached it's much faster to construct what we need
-        # to search directly, rather than filtering after the fact
-        pkg_cls_to_check = [
-            spack.repo.PATH.get_pkg_class(pkg_name)
-            for tag in args.tags
-            for pkg_name in spack.repo.PATH.packages_with_tags(tag)
-        ]
-        pkg_cls_to_check = list(set(pkg_cls_to_check))
-
-    # If the list of packages is empty, search for every possible package
-    if not args.tags and not pkg_cls_to_check:
-        pkg_cls_to_check = list(spack.repo.PATH.all_package_classes())
-
-    # If the user specified any packages to exclude from external find, add them here
-    if args.exclude:
-        pkg_cls_to_check = [pkg for pkg in pkg_cls_to_check if pkg.name not in args.exclude]
-
-    detected_packages = spack.detection.by_executable(pkg_cls_to_check, path_hints=args.path)
-    detected_packages.update(spack.detection.by_library(pkg_cls_to_check, path_hints=args.path))
+    if args.all or args.packages:
+        # Each detectable package has at least the detectable tag
+        args.tags = ["detectable"]
+    elif not args.tags:
+        # If the user didn't specify anything, search for build tools by default
+        args.tags = ["core-packages", "build-tools"]
+
+    candidate_packages = packages_to_search_for(
+        names=args.packages, tags=args.tags, exclude=args.exclude
+    )
+    detected_packages = spack.detection.by_path(
+        candidate_packages, path_hints=args.path, max_workers=args.jobs
+    )

     new_entries = spack.detection.update_configuration(
         detected_packages, scope=args.scope, buildable=not args.not_buildable
     )
     if new_entries:
-        path = spack.config.config.get_config_filename(args.scope, "packages")
+        path = spack.config.CONFIG.get_config_filename(args.scope, "packages")
         msg = "The following specs have been detected on this system and added to {0}"
         tty.msg(msg.format(path))
         spack.cmd.display_specs(new_entries)
@@ -173,6 +151,19 @@ def external_find(args):
     tty.msg("No new external packages detected")


+def packages_to_search_for(
+    *, names: Optional[List[str]], tags: List[str], exclude: Optional[List[str]]
+):
+    result = []
+    for current_tag in tags:
+        result.extend(spack.repo.PATH.packages_with_tags(current_tag))
+    if names:
+        result = [x for x in result if x in names]
+    if exclude:
+        result = [x for x in result if x not in exclude]
+    return result
+
+
 def external_read_cray_manifest(args):
     _collect_and_consume_cray_manifest_files(
         manifest_file=args.file,
```
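The rewritten search path funnels everything through tags: `--all` and explicit package lists both reduce to the cached `detectable` tag, and names or exclusions are applied as filters afterwards. A standalone rendition of how the helper composes (the tag index here is hypothetical, standing in for `spack.repo.PATH.packages_with_tags`):

```python
# Hypothetical tag index, keyed by tag name:
TAG_INDEX = {
    "detectable": ["cmake", "ninja", "python", "openssl"],
    "build-tools": ["cmake", "ninja"],
}


def packages_to_search_for(*, names, tags, exclude):
    result = []
    for current_tag in tags:
        result.extend(TAG_INDEX.get(current_tag, []))
    if names:
        result = [x for x in result if x in names]
    if exclude:
        result = [x for x in result if x not in exclude]
    return result


# An explicit package list becomes a name filter over the detectable tag:
print(packages_to_search_for(names=["python"], tags=["detectable"], exclude=None))
# ['python']
```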
```diff
@@ -64,11 +64,11 @@ def section_title(s):


 def version(s):
-    return spack.spec.version_color + s + plain_format
+    return spack.spec.VERSION_COLOR + s + plain_format


 def variant(s):
-    return spack.spec.enabled_variant_color + s + plain_format
+    return spack.spec.ENABLED_VARIANT_COLOR + s + plain_format


 class VariantFormatter:
```
```diff
@@ -52,6 +52,13 @@ def setup_parser(subparser):
         const="bat",
         help="print bat commands to load the package",
     )
+    shells.add_argument(
+        "--pwsh",
+        action="store_const",
+        dest="shell",
+        const="pwsh",
+        help="print pwsh commands to load the package",
+    )

     subparser.add_argument(
         "--first",
```
```diff
@@ -90,7 +90,6 @@ def setup_parser(subparser):

     # used to construct scope arguments below
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     # Add
     add_parser = sp.add_parser("add", help=mirror_add.__doc__)
@@ -99,7 +98,7 @@ def setup_parser(subparser):
     add_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
@@ -119,7 +118,7 @@ def setup_parser(subparser):
     remove_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
@@ -138,7 +137,7 @@ def setup_parser(subparser):
     set_url_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
@@ -167,7 +166,7 @@ def setup_parser(subparser):
     set_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
@@ -178,7 +177,7 @@ def setup_parser(subparser):
     list_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )
```
```diff
@@ -20,7 +20,6 @@
 def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="repo_command")
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     # Create
     create_parser = sp.add_parser("create", help=repo_create.__doc__)
@@ -45,7 +44,7 @@ def setup_parser(subparser):
     list_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )
@@ -56,7 +55,7 @@ def setup_parser(subparser):
     add_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
@@ -69,7 +68,7 @@ def setup_parser(subparser):
     remove_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
```
```diff
@@ -137,7 +137,7 @@ def solve(parser, args):
     # these are the same options as `spack spec`
     install_status_fn = spack.spec.Spec.install_status

-    fmt = spack.spec.display_format
+    fmt = spack.spec.DISPLAY_FORMAT
     if args.namespaces:
         fmt = "{namespace}." + fmt
```
```diff
@@ -77,7 +77,7 @@ def setup_parser(subparser):
 def spec(parser, args):
     install_status_fn = spack.spec.Spec.install_status

-    fmt = spack.spec.display_format
+    fmt = spack.spec.DISPLAY_FORMAT
     if args.namespaces:
         fmt = "{namespace}." + fmt
```
```diff
@@ -209,12 +209,11 @@ def unit_test(parser, args, unknown_args):
     # mock configuration used by unit tests
     # Note: skip on windows here because for the moment,
     # clingo is wholly unsupported from bootstrap
-    if sys.platform != "win32":
-        with spack.bootstrap.ensure_bootstrap_configuration():
-            spack.bootstrap.ensure_core_dependencies()
-            if pytest is None:
-                spack.bootstrap.ensure_environment_dependencies()
-                import pytest
+    with spack.bootstrap.ensure_bootstrap_configuration():
+        spack.bootstrap.ensure_core_dependencies()
+        if pytest is None:
+            spack.bootstrap.ensure_environment_dependencies()
+            import pytest

     if args.pytest_help:
         # make the pytest.main help output more accurate
```
```diff
@@ -51,6 +51,13 @@ def setup_parser(subparser):
         const="bat",
         help="print bat commands to load the package",
     )
+    shells.add_argument(
+        "--pwsh",
+        action="store_const",
+        dest="shell",
+        const="pwsh",
+        help="print pwsh commands to load the package",
+    )

     subparser.add_argument(
         "-a", "--all", action="store_true", help="unload all loaded Spack packages"
```
```diff
@@ -135,7 +135,7 @@ def _init_compiler_config(*, scope):

 def compiler_config_files():
     config_files = list()
-    config = spack.config.config
+    config = spack.config.CONFIG
     for scope in config.file_scopes:
         name = scope.name
         compiler_config = config.get("compilers", scope=name)
@@ -169,7 +169,7 @@ def remove_compiler_from_config(compiler_spec, scope=None):
     """
     candidate_scopes = [scope]
     if scope is None:
-        candidate_scopes = spack.config.config.scopes.keys()
+        candidate_scopes = spack.config.CONFIG.scopes.keys()

     removal_happened = False
     for current_scope in candidate_scopes:
@@ -523,7 +523,7 @@ def compiler_for_spec(compiler_spec, arch_spec):

 @_auto_compiler_spec
 def get_compiler_duplicates(compiler_spec, arch_spec):
-    config = spack.config.config
+    config = spack.config.CONFIG

     scope_to_compilers = {}
     for scope in config.scopes:
```
```diff
@@ -99,6 +99,28 @@ def cxx17_flag(self):
         else:
             return "-std=c++17"

+    @property
+    def cxx20_flag(self):
+        if self.real_version < Version("8.0"):
+            raise spack.compiler.UnsupportedCompilerFlag(
+                self, "the C++20 standard", "cxx20_flag", "< 8.0"
+            )
+        elif self.real_version < Version("11.0"):
+            return "-std=c++2a"
+        else:
+            return "-std=c++20"
+
+    @property
+    def cxx23_flag(self):
+        if self.real_version < Version("11.0"):
+            raise spack.compiler.UnsupportedCompilerFlag(
+                self, "the C++23 standard", "cxx23_flag", "< 11.0"
+            )
+        elif self.real_version < Version("14.0"):
+            return "-std=c++2b"
+        else:
+            return "-std=c++23"
+
     @property
     def c99_flag(self):
         if self.real_version < Version("4.5"):
```
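The two new properties encode a three-way split per standard: releases that have no support at all raise, a middle range only knows the provisional spelling (`-std=c++2a`, `-std=c++2b`), and newer releases take the final flag. A table-driven check of the C++20 case using the same cut-offs the property encodes (standalone sketch, plain integers instead of Spack's `Version` class):

```python
def gcc_cxx20_flag(major: int) -> str:
    # Mirrors the cxx20_flag property above: unsupported < 8, provisional < 11.
    if major < 8:
        raise ValueError("GCC < 8.0 does not support the C++20 standard")
    return "-std=c++2a" if major < 11 else "-std=c++20"


for major in (8, 10, 11, 13):
    print(major, gcc_cxx20_flag(major))
# 8 -std=c++2a / 10 -std=c++2a / 11 -std=c++20 / 13 -std=c++20
```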
@@ -29,6 +29,90 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class CmdCall:
|
||||||
|
"""Compose a call to `cmd` for an ordered series of cmd commands/scripts"""
|
||||||
|
|
||||||
|
def __init__(self, *cmds):
|
||||||
|
if not cmds:
|
||||||
|
raise RuntimeError(
|
||||||
|
"""Attempting to run commands from CMD without specifying commands.
|
||||||
|
Please add commands to be run."""
|
||||||
|
)
|
||||||
|
self._cmds = cmds
|
||||||
|
|
||||||
|
def __call__(self):
|
||||||
|
out = subprocess.check_output(self.cmd_line, stderr=subprocess.STDOUT) # novermin
|
||||||
|
return out.decode("utf-16le", errors="replace") # novermin
|
||||||
|
|
||||||
|
@property
|
||||||
|
def cmd_line(self):
|
||||||
|
base_call = "cmd /u /c "
|
||||||
|
commands = " && ".join([x.command_str() for x in self._cmds])
|
||||||
|
# If multiple commands are being invoked by a single subshell
|
||||||
|
# they must be encapsulated by a double quote. Always double
|
||||||
|
# quote to be sure of proper handling
|
||||||
|
# cmd will properly resolve nested double quotes as needed
|
||||||
|
#
|
||||||
|
# `set`` writes out the active env to the subshell stdout,
|
||||||
|
# and in this context we are always trying to obtain env
|
||||||
|
# state so it should always be appended
|
||||||
|
return base_call + f'"{commands} && set"'
|
||||||
|
|
||||||
|
|
||||||
|
class VarsInvocation:
|
||||||
|
def __init__(self, script):
|
||||||
|
self._script = script
|
||||||
|
|
||||||
|
def command_str(self):
|
||||||
|
return f'"{self._script}"'
|
||||||
|
|
||||||
|
@property
|
||||||
|
def script(self):
|
||||||
|
return self._script
|
||||||
|
|
||||||
|
|
||||||
|
class VCVarsInvocation(VarsInvocation):
|
||||||
|
def __init__(self, script, arch, msvc_version):
|
||||||
|
super(VCVarsInvocation, self).__init__(script)
|
||||||
|
self._arch = arch
|
||||||
|
self._msvc_version = msvc_version
|
||||||
|
|
||||||
|
@property
|
||||||
|
def sdk_ver(self):
|
||||||
|
"""Accessor for Windows SDK version property
|
||||||
|
|
||||||
|
Note: This property may not be set by
|
||||||
|
the calling context and as such this property will
|
||||||
|
return an empty string
|
||||||
|
|
||||||
|
This property will ONLY be set if the SDK package
|
||||||
|
is a dependency somewhere in the Spack DAG of the package
|
||||||
|
for which we are constructing an MSVC compiler env.
|
||||||
|
Otherwise this property should be unset to allow the VCVARS
|
||||||
|
script to use its internal heuristics to determine appropriate
|
||||||
|
SDK version
|
||||||
|
"""
|
||||||
|
if getattr(self, "_sdk_ver", None):
|
||||||
|
return self._sdk_ver + ".0"
|
||||||
|
return ""
|
||||||
|
|
||||||
|
@sdk_ver.setter
|
||||||
|
def sdk_ver(self, val):
|
||||||
|
self._sdk_ver = val
|
||||||
|
|
||||||
|
@property
|
||||||
|
def arch(self):
|
||||||
|
return self._arch
|
||||||
|
|
||||||
|
@property
|
||||||
|
def vcvars_ver(self):
|
||||||
|
return f"-vcvars_ver={self._msvc_version}"
|
||||||
|
|
||||||
|
def command_str(self):
|
||||||
|
script = super(VCVarsInvocation, self).command_str()
|
||||||
|
return f"{script} {self.arch} {self.sdk_ver} {self.vcvars_ver}"
|
||||||
|
|
||||||
|
|
||||||
 def get_valid_fortran_pth(comp_ver):
     cl_ver = str(comp_ver)
     sort_fn = lambda fc_ver: StrictVersion(fc_ver)

@@ -75,22 +159,48 @@ class Msvc(Compiler):
     # file based on compiler executable path.

     def __init__(self, *args, **kwargs):
-        new_pth = [pth if pth else get_valid_fortran_pth(args[0].version) for pth in args[3]]
-        args[3][:] = new_pth
+        # This positional argument "paths" is later parsed and processed by the base class
+        # via the call to `super` later in this method
+        paths = args[3]
+        # This positional argument "cspec" is also parsed and handled by the base class
+        # constructor
+        cspec = args[0]
+        new_pth = [pth if pth else get_valid_fortran_pth(cspec.version) for pth in paths]
+        paths[:] = new_pth
         super().__init__(*args, **kwargs)
-        if os.getenv("ONEAPI_ROOT"):
+        # To use the MSVC compilers, VCVARS must be invoked
+        # VCVARS is located at a fixed location, referenceable
+        # idiomatically by the following relative path from the
+        # compiler.
+        # Spack first finds the compilers via VSWHERE
+        # and stores their path, but their respective VCVARS
+        # file must be invoked before usage.
+        env_cmds = []
+        compiler_root = os.path.join(self.cc, "../../../../../../..")
+        vcvars_script_path = os.path.join(compiler_root, "Auxiliary", "Build", "vcvars64.bat")
+        # get current platform architecture and format for vcvars argument
+        arch = spack.platforms.real_host().default.lower()
+        arch = arch.replace("-", "_")
+        self.vcvars_call = VCVarsInvocation(vcvars_script_path, arch, self.msvc_version)
+        env_cmds.append(self.vcvars_call)
+        # Below is a check for a valid fortran path
+        # paths has c, cxx, fc, and f77 paths in that order
+        # paths[2] refers to the fc path and is a generic check
+        # for a fortran compiler
+        if paths[2]:
             # If this is found, it sets all the vars
-            self.setvarsfile = os.path.join(os.getenv("ONEAPI_ROOT"), "setvars.bat")
-        else:
-            # To use the MSVC compilers, VCVARS must be invoked
-            # VCVARS is located at a fixed location, referencable
-            # idiomatically by the following relative path from the
-            # compiler.
-            # Spack first finds the compilers via VSWHERE
-            # and stores their path, but their respective VCVARS
-            # file must be invoked before useage.
-            self.setvarsfile = os.path.abspath(os.path.join(self.cc, "../../../../../../.."))
-            self.setvarsfile = os.path.join(self.setvarsfile, "Auxiliary", "Build", "vcvars64.bat")
+            oneapi_root = os.getenv("ONEAPI_ROOT")
+            oneapi_root_setvars = os.path.join(oneapi_root, "setvars.bat")
+            oneapi_version_setvars = os.path.join(
+                oneapi_root, "compiler", str(self.ifx_version), "env", "vars.bat"
+            )
+            # order matters here: the specific version env must be invoked first,
+            # otherwise it will be ignored if the root setvars sets up the oneapi
+            # env first
+            env_cmds.extend(
+                [VarsInvocation(oneapi_version_setvars), VarsInvocation(oneapi_root_setvars)]
+            )
+        self.msvc_compiler_environment = CmdCall(*env_cmds)

     @property
     def msvc_version(self):
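A note on the ordering constraint called out in the comment above: chaining the environment scripts into one shell command means whichever script runs first wins for any variable it sets. A minimal illustrative sketch of that chaining (ScriptCall and chain are hypothetical stand-ins, not Spack's CmdCall/VarsInvocation API):

    class ScriptCall:
        def __init__(self, path):
            self.path = path

        def command_str(self):
            return f'"{self.path}"'

    def chain(*calls):
        # "&&" short-circuits: a failing script aborts the rest of the chain
        return " && ".join(c.command_str() for c in calls)

    print(chain(ScriptCall("env/vars.bat"), ScriptCall("setvars.bat")))
    # "env/vars.bat" && "setvars.bat"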
@@ -119,16 +229,30 @@ def platform_toolset_ver(self):
         """
         return self.msvc_version[:2].joined.string[:3]

-    @property
-    def cl_version(self):
-        """Cl toolset version"""
+    def _compiler_version(self, compiler):
+        """Returns version object for given compiler"""
+        # ignore_errors below is true here due to ifx's
+        # nonzero return code when it is not provided
+        # an input file
         return Version(
             re.search(
                 Msvc.version_regex,
-                spack.compiler.get_compiler_version_output(self.cc, version_arg=None),
+                spack.compiler.get_compiler_version_output(
+                    compiler, version_arg=None, ignore_errors=True
+                ),
             ).group(1)
         )

+    @property
+    def cl_version(self):
+        """Cl toolset version"""
+        return self._compiler_version(self.cc)
+
+    @property
+    def ifx_version(self):
+        """Ifx compiler version associated with this version of MSVC"""
+        return self._compiler_version(self.fc)
+
     @property
     def vs_root(self):
         # The MSVC install root is located at a fixed level above the compiler
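The refactor above funnels both cl and ifx through a single helper that runs the compiler, tolerates a nonzero exit status, and regex-matches the version out of the banner text. A hedged, self-contained sketch of that pattern (the regex and function below are placeholders, not Msvc.version_regex or Spack's get_compiler_version_output):

    import re
    import subprocess

    def compiler_version(exe: str, version_regex: str = r"(\d+\.\d+\.\d+)") -> str:
        # the return code is deliberately ignored: ifx exits nonzero when it
        # is invoked without an input file, but still prints its banner
        proc = subprocess.run([exe], capture_output=True, text=True)
        banner = proc.stdout + proc.stderr
        match = re.search(version_regex, banner)
        if match is None:
            raise ValueError(f"could not parse a version from {exe!r} output")
        return match.group(1)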
@@ -146,27 +270,12 @@ def setup_custom_environment(self, pkg, env):
         # output, sort into dictionary, use that to make the build
         # environment.

-        # get current platform architecture and format for vcvars argument
-        arch = spack.platforms.real_host().default.lower()
-        arch = arch.replace("-", "_")
         # vcvars can target specific sdk versions, force it to pick up concretized sdk
         # version, if needed by spec
-        sdk_ver = (
-            ""
-            if "win-sdk" not in pkg.spec or pkg.name == "win-sdk"
-            else pkg.spec["win-sdk"].version.string + ".0"
-        )
-        # provide vcvars with msvc version selected by concretization,
-        # not whatever it happens to pick up on the system (highest available version)
-        out = subprocess.check_output(  # novermin
-            'cmd /u /c "{}" {} {} {} && set'.format(
-                self.setvarsfile, arch, sdk_ver, "-vcvars_ver=%s" % self.msvc_version
-            ),
-            stderr=subprocess.STDOUT,
-        )
-        if sys.version_info[0] >= 3:
-            out = out.decode("utf-16le", errors="replace")  # novermin
-
+        if pkg.name != "win-sdk" and "win-sdk" in pkg.spec:
+            self.vcvars_call.sdk_ver = pkg.spec["win-sdk"].version.string
+
+        out = self.msvc_compiler_environment()
         int_env = dict(
             (key, value)
             for key, _, value in (line.partition("=") for line in out.splitlines())
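The dict built at the end of this hunk relies on str.partition, which splits on the first "=" only, so values that themselves contain "=" survive intact. A self-contained demonstration of that parsing step:

    out = "PATH=C:\\bin;C:\\tools\nOPTS=a=b=c\nEMPTY="
    int_env = dict(
        (key, value) for key, _, value in (line.partition("=") for line in out.splitlines())
    )
    assert int_env["OPTS"] == "a=b=c"  # everything after the first "=" is the value
    assert int_env["EMPTY"] == ""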
@@ -28,6 +28,7 @@

 import spack.abi
 import spack.compilers
+import spack.config
 import spack.environment
 import spack.error
 import spack.platforms
@@ -37,7 +38,6 @@
 import spack.tengine
 import spack.util.path
 import spack.variant as vt
-from spack.config import config
 from spack.package_prefs import PackagePrefs, is_spec_buildable, spec_externals
 from spack.version import ClosedOpenRange, VersionList, ver

@@ -76,7 +76,7 @@ class Concretizer:

     def __init__(self, abstract_spec=None):
         if Concretizer.check_for_compiler_existence is None:
-            Concretizer.check_for_compiler_existence = not config.get(
+            Concretizer.check_for_compiler_existence = not spack.config.get(
                 "config:install_missing_compilers", False
             )
         self.abstract_spec = abstract_spec
@@ -47,6 +47,8 @@
 import spack.platforms
 import spack.schema
 import spack.schema.bootstrap
+import spack.schema.cdash
+import spack.schema.ci
 import spack.schema.compilers
 import spack.schema.concretizer
 import spack.schema.config
@@ -64,7 +66,7 @@
 from spack.util.cpus import cpus_available

 #: Dict from section names -> schema for that section
-section_schemas = {
+SECTION_SCHEMAS = {
     "compilers": spack.schema.compilers.schema,
     "concretizer": spack.schema.concretizer.schema,
     "mirrors": spack.schema.mirrors.schema,
@@ -80,16 +82,16 @@

 # Same as above, but including keys for environments
 # this allows us to unify config reading between configs and environments
-all_schemas = copy.deepcopy(section_schemas)
-all_schemas.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})
+_ALL_SCHEMAS = copy.deepcopy(SECTION_SCHEMAS)
+_ALL_SCHEMAS.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})

 #: Path to the default configuration
-configuration_defaults_path = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))
+CONFIGURATION_DEFAULTS_PATH = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))

 #: Hard-coded default values for some key configuration options.
 #: This ensures that Spack will still work even if config.yaml in
 #: the defaults scope is removed.
-config_defaults = {
+CONFIG_DEFAULTS = {
     "config": {
         "debug": False,
         "connect_timeout": 10,
@@ -105,10 +107,10 @@

 #: metavar to use for commands that accept scopes
 #: this is shorter and more readable than listing all choices
-scopes_metavar = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"
+SCOPES_METAVAR = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"

 #: Base name for the (internal) overrides scope.
-overrides_base_name = "overrides-"
+_OVERRIDES_BASE_NAME = "overrides-"


 class ConfigScope:
@@ -134,7 +136,7 @@ def get_section_filename(self, section):
     def get_section(self, section):
         if section not in self.sections:
             path = self.get_section_filename(section)
-            schema = section_schemas[section]
+            schema = SECTION_SCHEMAS[section]
             data = read_config_file(path, schema)
             self.sections[section] = data
         return self.sections[section]
@@ -145,7 +147,7 @@ def _write_section(self, section):

         # We copy data here to avoid adding defaults at write time
         validate_data = copy.deepcopy(data)
-        validate(validate_data, section_schemas[section])
+        validate(validate_data, SECTION_SCHEMAS[section])

         try:
             mkdirp(self.path)
@@ -317,7 +319,7 @@ def __init__(self, name, data=None):
         data = InternalConfigScope._process_dict_keyname_overrides(data)
         for section in data:
             dsec = data[section]
-            validate({section: dsec}, section_schemas[section])
+            validate({section: dsec}, SECTION_SCHEMAS[section])
             self.sections[section] = _mark_internal(syaml.syaml_dict({section: dsec}), name)

     def get_section_filename(self, section):
@@ -333,7 +335,7 @@ def _write_section(self, section):
         """This only validates, as the data is already in memory."""
         data = self.get_section(section)
         if data is not None:
-            validate(data, section_schemas[section])
+            validate(data, SECTION_SCHEMAS[section])
         self.sections[section] = _mark_internal(data, self.name)

     def __repr__(self):
@@ -430,7 +432,7 @@ def file_scopes(self) -> List[ConfigScope]:
         return [
             s
             for s in self.scopes.values()
-            if (type(s) == ConfigScope or type(s) == SingleFileScope)
+            if (type(s) is ConfigScope or type(s) is SingleFileScope)
         ]

     def highest_precedence_scope(self) -> ConfigScope:
@@ -711,11 +713,11 @@ def override(path_or_scope, value=None):
     """
     if isinstance(path_or_scope, ConfigScope):
         overrides = path_or_scope
-        config.push_scope(path_or_scope)
+        CONFIG.push_scope(path_or_scope)
     else:
-        base_name = overrides_base_name
+        base_name = _OVERRIDES_BASE_NAME
         # Ensure the new override gets a unique scope name
-        current_overrides = [s.name for s in config.matching_scopes(r"^{0}".format(base_name))]
+        current_overrides = [s.name for s in CONFIG.matching_scopes(r"^{0}".format(base_name))]
         num_overrides = len(current_overrides)
         while True:
             scope_name = "{0}{1}".format(base_name, num_overrides)
@@ -725,19 +727,19 @@ def override(path_or_scope, value=None):
             break

         overrides = InternalConfigScope(scope_name)
-        config.push_scope(overrides)
-        config.set(path_or_scope, value, scope=scope_name)
+        CONFIG.push_scope(overrides)
+        CONFIG.set(path_or_scope, value, scope=scope_name)

     try:
-        yield config
+        yield CONFIG
     finally:
-        scope = config.remove_scope(overrides.name)
+        scope = CONFIG.remove_scope(overrides.name)
         assert scope is overrides


 #: configuration scopes added on the command line
 #: set by ``spack.main.main()``.
-command_line_scopes: List[str] = []
+COMMAND_LINE_SCOPES: List[str] = []


 def _add_platform_scope(cfg, scope_type, name, path):
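Usage sketch for the override() context manager updated above: it pushes a temporary internal scope, yields the global configuration, and removes the scope again on exit. The option and value here are illustrative:

    import spack.config

    with spack.config.override("config:build_jobs", 4):
        assert spack.config.get("config:build_jobs") == 4
    # the temporary override scope has been popped again here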
@@ -781,14 +783,14 @@ def create():
     cfg = Configuration()

     # first do the builtin, hardcoded defaults
-    builtin = InternalConfigScope("_builtin", config_defaults)
+    builtin = InternalConfigScope("_builtin", CONFIG_DEFAULTS)
     cfg.push_scope(builtin)

     # Builtin paths to configuration files in Spack
     configuration_paths = [
         # Default configuration scope is the lowest-level scope. These are
         # versioned with Spack and can be overridden by systems, sites or users
-        configuration_defaults_path
+        CONFIGURATION_DEFAULTS_PATH
     ]

     disable_local_config = "SPACK_DISABLE_LOCAL_CONFIG" in os.environ
@@ -815,7 +817,7 @@ def create():
         _add_platform_scope(cfg, ConfigScope, name, path)

     # add command-line scopes
-    _add_command_line_scopes(cfg, command_line_scopes)
+    _add_command_line_scopes(cfg, COMMAND_LINE_SCOPES)

     # we make a special scope for spack commands so that they can
     # override configuration options.
@@ -825,7 +827,7 @@ def create():


 #: This is the singleton configuration instance for Spack.
-config: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)
+CONFIG: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)


 def add_from_file(filename, scope=None):
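The CONFIG singleton above is created lazily: llnl.util.lang.Singleton defers the create() call until the object is first used, which keeps importing spack.config cheap. A hypothetical stand-in showing the general pattern (not the actual llnl.util.lang implementation):

    class LazySingleton:
        def __init__(self, factory):
            self._factory = factory
            self._instance = None

        def __getattr__(self, name):
            # only called for attributes not found on LazySingleton itself,
            # so the wrapped object is built on first real use
            if self._instance is None:
                self._instance = self._factory()
            return getattr(self._instance, name)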
@@ -838,7 +840,7 @@ def add_from_file(filename, scope=None):
     # update all sections from config dict
     # We have to iterate on keys to keep overrides from the file
     for section in data.keys():
-        if section in section_schemas.keys():
+        if section in SECTION_SCHEMAS.keys():
             # Special handling for compiler scope difference
             # Has to be handled after we choose a section
             if scope is None:
@@ -849,18 +851,18 @@ def add_from_file(filename, scope=None):
                 new = merge_yaml(existing, value)

                 # We cannot call config.set directly (set is a type)
-                config.set(section, new, scope)
+                CONFIG.set(section, new, scope)


 def add(fullpath, scope=None):
     """Add the given configuration to the specified config scope.
     Add accepts a path. If you want to add from a filename, use add_from_file"""

     components = process_config_path(fullpath)

     has_existing_value = True
     path = ""
     override = False
+    value = syaml.load_config(components[-1])
     for idx, name in enumerate(components[:-1]):
         # First handle double colons in constructing path
         colon = "::" if override else ":" if path else ""
@@ -881,14 +883,14 @@ def add(fullpath, scope=None):
             existing = get_valid_type(path)

             # construct value from this point down
-            value = syaml.load_config(components[-1])
             for component in reversed(components[idx + 1 : -1]):
                 value = {component: value}
             break

+    if override:
+        path += "::"
+
     if has_existing_value:
-        path, _, value = fullpath.rpartition(":")
-        value = syaml.load_config(value)
         existing = get(path, scope=scope)

         # append values to lists
@@ -897,12 +899,12 @@ def add(fullpath, scope=None):

     # merge value into existing
     new = merge_yaml(existing, value)
-    config.set(path, new, scope)
+    CONFIG.set(path, new, scope)


 def get(path, default=None, scope=None):
     """Module-level wrapper for ``Configuration.get()``."""
-    return config.get(path, default, scope)
+    return CONFIG.get(path, default, scope)


 def set(path, value, scope=None):
@@ -910,26 +912,26 @@ def set(path, value, scope=None):

     Accepts the path syntax described in ``get()``.
     """
-    return config.set(path, value, scope)
+    return CONFIG.set(path, value, scope)


 def add_default_platform_scope(platform):
     plat_name = os.path.join("defaults", platform)
-    plat_path = os.path.join(configuration_defaults_path[1], platform)
-    config.push_scope(ConfigScope(plat_name, plat_path))
+    plat_path = os.path.join(CONFIGURATION_DEFAULTS_PATH[1], platform)
+    CONFIG.push_scope(ConfigScope(plat_name, plat_path))


 def scopes():
     """Convenience function to get list of configuration scopes."""
-    return config.scopes
+    return CONFIG.scopes


 def _validate_section_name(section):
     """Exit if the section is not a valid section."""
-    if section not in section_schemas:
+    if section not in SECTION_SCHEMAS:
         raise ConfigSectionError(
             "Invalid config section: '%s'. Options are: %s"
-            % (section, " ".join(section_schemas.keys()))
+            % (section, " ".join(SECTION_SCHEMAS.keys()))
         )

@@ -990,7 +992,7 @@ def read_config_file(filename, schema=None):
     if data:
         if not schema:
             key = next(iter(data))
-            schema = all_schemas[key]
+            schema = _ALL_SCHEMAS[key]
         validate(data, schema)
     return data

@@ -1089,7 +1091,7 @@ def get_valid_type(path):
         test_data = {component: test_data}

     try:
-        validate(test_data, section_schemas[section])
+        validate(test_data, SECTION_SCHEMAS[section])
     except (ConfigFormatError, AttributeError) as e:
         jsonschema_error = e.validation_error
         if jsonschema_error.validator == "type":
@@ -1229,11 +1231,17 @@ def they_are(t):
     return copy.copy(source)


-#
-# Process a path argument to config.set() that may contain overrides ('::' or
-# trailing ':')
-#
 def process_config_path(path):
+    """Process a path argument to config.set() that may contain overrides ('::' or
+    trailing ':')
+
+    Note: quoted value path components will be processed as a single value (escaping colons);
+    quoted path components outside of the value will be considered ill formed and will
+    raise.
+    e.g. `this:is:a:path:'value:with:colon'` will yield:
+
+        [this, is, a, path, value:with:colon]
+    """
     result = []
     if path.startswith(":"):
         raise syaml.SpackYAMLError("Illegal leading `:' in path `{0}'".format(path), "")
@@ -1261,6 +1269,17 @@ def process_config_path(path):
             front.append = True

         result.append(front)
+
+        quote = "['\"]"
+        not_quote = "[^'\"]"
+
+        if re.match(f"^{quote}", path):
+            m = re.match(rf"^({quote}{not_quote}+{quote})$", path)
+            if not m:
+                raise ValueError("Quotes indicate value, but there are additional path entries")
+            result.append(m.group(1))
+            break
+
     return result

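A self-contained check of the quote-matching regexes added to process_config_path() above: a path component that starts with a quote must be one complete quoted run, otherwise it is rejected as ill formed:

    import re

    quote = "['\"]"
    not_quote = "[^'\"]"

    def leading_quoted_value(path):
        if re.match(f"^{quote}", path):
            m = re.match(rf"^({quote}{not_quote}+{quote})$", path)
            if not m:
                raise ValueError("Quotes indicate value, but there are additional path entries")
            return m.group(1)
        return None

    assert leading_quoted_value("'value:with:colon'") == "'value:with:colon'"
    assert leading_quoted_value("plain") is None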
@@ -1278,9 +1297,9 @@ def default_modify_scope(section="config"):
     If this is not 'compilers', a general (non-platform) scope is used.
     """
     if section == "compilers":
-        return spack.config.config.highest_precedence_scope().name
+        return CONFIG.highest_precedence_scope().name
     else:
-        return spack.config.config.highest_precedence_non_platform_scope().name
+        return CONFIG.highest_precedence_non_platform_scope().name


 def default_list_scope():
@@ -1337,18 +1356,18 @@ def use_configuration(*scopes_or_paths):
     Returns:
         Configuration object associated with the scopes passed as arguments
     """
-    global config
+    global CONFIG

     # Normalize input and construct a Configuration object
     configuration = _config_from(scopes_or_paths)
-    config.clear_caches(), configuration.clear_caches()
+    CONFIG.clear_caches(), configuration.clear_caches()

-    saved_config, config = config, configuration
+    saved_config, CONFIG = CONFIG, configuration

     try:
         yield configuration
     finally:
-        config = saved_config
+        CONFIG = saved_config


 @llnl.util.lang.memoized
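Usage sketch for use_configuration() above: it swaps the module-global CONFIG for a temporary Configuration built from the given scopes or paths and restores the previous one afterwards. The scope path is illustrative:

    import spack.config

    with spack.config.use_configuration("/tmp/test-scope") as cfg:
        cfg.get("config")  # reads only from the temporary configuration
    # the previous global configuration is active again here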
@@ -5,8 +5,8 @@
 """Writers for different kind of recipes and related
 convenience functions.
 """
-import collections
 import copy
+from collections import namedtuple
 from typing import Optional

 import spack.environment as ev
@@ -159,13 +159,13 @@ def depfile(self):
     @tengine.context_property
     def run(self):
         """Information related to the run image."""
-        Run = collections.namedtuple("Run", ["image"])
+        Run = namedtuple("Run", ["image"])
         return Run(image=self.final_image)

     @tengine.context_property
     def build(self):
         """Information related to the build image."""
-        Build = collections.namedtuple("Build", ["image"])
+        Build = namedtuple("Build", ["image"])
         return Build(image=self.build_image)

     @tengine.context_property
@@ -176,12 +176,13 @@ def strip(self):
     @tengine.context_property
     def paths(self):
         """Important paths in the image"""
-        Paths = collections.namedtuple("Paths", ["environment", "store", "hidden_view", "view"])
+        Paths = namedtuple("Paths", ["environment", "store", "view_parent", "view", "former_view"])
         return Paths(
             environment="/opt/spack-environment",
             store="/opt/software",
-            hidden_view="/opt/._view",
-            view="/opt/view",
+            view_parent="/opt/views",
+            view="/opt/views/view",
+            former_view="/opt/view",  # /opt/view -> /opt/views/view for backward compatibility
         )

     @tengine.context_property
@@ -257,7 +258,7 @@ def _package_info_from(self, package_list):

         update, install, clean = commands_for(os_pkg_manager)

-        Packages = collections.namedtuple("Packages", ["update", "install", "list", "clean"])
+        Packages = namedtuple("Packages", ["update", "install", "list", "clean"])
         return Packages(update=update, install=install, list=package_list, clean=clean)

     def _os_pkg_manager(self):
@@ -273,7 +274,7 @@ def _os_pkg_manager(self):

     @tengine.context_property
     def extra_instructions(self):
-        Extras = collections.namedtuple("Extra", ["build", "final"])
+        Extras = namedtuple("Extra", ["build", "final"])
         extras = self.container_config.get("extra_instructions", {})
         build, final = extras.get("build", None), extras.get("final", None)
         return Extras(build=build, final=final)
@@ -295,7 +296,7 @@ def bootstrap(self):
         context = {"bootstrap": {"image": self.bootstrap_image, "spack_checkout": command}}
         bootstrap_recipe = env.get_template(template_path).render(**context)

-        Bootstrap = collections.namedtuple("Bootstrap", ["image", "recipe"])
+        Bootstrap = namedtuple("Bootstrap", ["image", "recipe"])
         return Bootstrap(image=self.bootstrap_image, recipe=bootstrap_recipe)

     @tengine.context_property
@@ -303,7 +304,7 @@ def render_phase(self):
         render_bootstrap = bool(self.bootstrap_image)
         render_build = not (self.last_phase == "bootstrap")
         render_final = self.last_phase in (None, "final")
-        Render = collections.namedtuple("Render", ["bootstrap", "build", "final"])
+        Render = namedtuple("Render", ["bootstrap", "build", "final"])
         return Render(bootstrap=render_bootstrap, build=render_build, final=render_final)

     def __call__(self):
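The former_view field added above keeps the old /opt/view location alive as a pointer to the relocated /opt/views/view. A small sketch of how a generated recipe might consume these paths (the symlink command is illustrative):

    from collections import namedtuple

    Paths = namedtuple("Paths", ["environment", "store", "view_parent", "view", "former_view"])
    p = Paths("/opt/spack-environment", "/opt/software", "/opt/views", "/opt/views/view", "/opt/view")
    print(f"ln -s {p.view} {p.former_view}")  # backward-compatible symlink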
@@ -3,12 +3,11 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 from .common import DetectedPackage, executable_prefix, update_configuration
-from .path import by_executable, by_library, executables_in_path
+from .path import by_path, executables_in_path

 __all__ = [
     "DetectedPackage",
-    "by_library",
-    "by_executable",
+    "by_path",
     "executables_in_path",
     "executable_prefix",
     "update_configuration",
@@ -13,13 +13,13 @@
 The module also contains other functions that might be useful across different
 detection mechanisms.
 """
-import collections
 import glob
 import itertools
 import os
 import os.path
 import re
 import sys
+from typing import Dict, List, NamedTuple, Optional, Set, Tuple, Union

 import llnl.util.tty

@@ -29,12 +29,28 @@
 import spack.util.spack_yaml
 import spack.util.windows_registry

-#: Information on a package that has been detected
-DetectedPackage = collections.namedtuple("DetectedPackage", ["spec", "prefix"])
+class DetectedPackage(NamedTuple):
+    """Information on a package that has been detected."""
+
+    #: Spec that was detected
+    spec: spack.spec.Spec
+    #: Prefix of the spec
+    prefix: str
+
+    def __reduce__(self):
+        return DetectedPackage.restore, (str(self.spec), self.prefix, self.spec.extra_attributes)
+
+    @staticmethod
+    def restore(
+        spec_str: str, prefix: str, extra_attributes: Optional[Dict[str, str]]
+    ) -> "DetectedPackage":
+        spec = spack.spec.Spec.from_detection(spec_str=spec_str, extra_attributes=extra_attributes)
+        return DetectedPackage(spec=spec, prefix=prefix)


-def _externals_in_packages_yaml():
-    """Return all the specs mentioned as externals in packages.yaml"""
+def _externals_in_packages_yaml() -> Set[spack.spec.Spec]:
+    """Returns all the specs mentioned as externals in packages.yaml"""
     packages_yaml = spack.config.get("packages")
     already_defined_specs = set()
     for pkg_name, package_configuration in packages_yaml.items():
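Why the __reduce__ above matters: pickling stores a (callable, args) pair, so a DetectedPackage crossing a process boundary is rebuilt by re-parsing the spec string instead of pickling the Spec object structurally. A self-contained analogue of that reduce protocol with a plain class:

    import pickle

    class Handle:
        def __init__(self, text):
            self.text = text

        def __reduce__(self):
            # pickle records (Handle, (self.text,)); unpickling calls Handle(text)
            return (Handle, (self.text,))

    roundtripped = pickle.loads(pickle.dumps(Handle("zlib@1.2.13")))
    assert roundtripped.text == "zlib@1.2.13"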
@@ -43,7 +59,12 @@ def _externals_in_packages_yaml():
     return already_defined_specs


-def _pkg_config_dict(external_pkg_entries):
+ExternalEntryType = Union[str, Dict[str, str]]
+
+
+def _pkg_config_dict(
+    external_pkg_entries: List[DetectedPackage],
+) -> Dict[str, Union[bool, List[Dict[str, ExternalEntryType]]]]:
     """Generate a package specific config dict according to the packages.yaml schema.

     This does not generate the entire packages.yaml. For example, given some
@@ -65,7 +86,10 @@ def _pkg_config_dict(external_pkg_entries):
         if not _spec_is_valid(e.spec):
             continue

-        external_items = [("spec", str(e.spec)), ("prefix", e.prefix)]
+        external_items: List[Tuple[str, ExternalEntryType]] = [
+            ("spec", str(e.spec)),
+            ("prefix", e.prefix),
+        ]
         if e.spec.external_modules:
             external_items.append(("modules", e.spec.external_modules))

@@ -83,15 +107,14 @@ def _pkg_config_dict(external_pkg_entries):
     return pkg_dict


-def _spec_is_valid(spec):
+def _spec_is_valid(spec: spack.spec.Spec) -> bool:
     try:
         str(spec)
     except spack.error.SpackError:
-        # It is assumed here that we can at least extract the package name from
-        # the spec so we can look up the implementation of
-        # determine_spec_details
-        msg = "Constructed spec for {0} does not have a string representation"
-        llnl.util.tty.warn(msg.format(spec.name))
+        # It is assumed here that we can at least extract the package name from the spec so we
+        # can look up the implementation of determine_spec_details
+        msg = f"Constructed spec for {spec.name} does not have a string representation"
+        llnl.util.tty.warn(msg)
         return False

     try:
@@ -106,7 +129,7 @@ def _spec_is_valid(spec):
     return True


-def path_to_dict(search_paths):
+def path_to_dict(search_paths: List[str]):
     """Return dictionary[fullpath]: basename from list of paths"""
     path_to_lib = {}
     # Reverse order of search directories so that a lib in the first
@@ -124,7 +147,7 @@ def path_to_dict(search_paths):
     return path_to_lib


-def is_executable(file_path):
+def is_executable(file_path: str) -> bool:
     """Return True if the path passed as argument is that of an executable"""
     return os.path.isfile(file_path) and os.access(file_path, os.X_OK)

@@ -146,7 +169,7 @@ def _convert_to_iterable(single_val_or_multiple):
     return [x]


-def executable_prefix(executable_dir):
+def executable_prefix(executable_dir: str) -> str:
     """Given a directory where an executable is found, guess the prefix
     (i.e. the "root" directory of that installation) and return it.

@@ -167,12 +190,12 @@ def executable_prefix(executable_dir):
     return os.sep.join(components[:idx])


-def library_prefix(library_dir):
-    """Given a directory where an library is found, guess the prefix
+def library_prefix(library_dir: str) -> str:
+    """Given a directory where a library is found, guess the prefix
     (i.e. the "root" directory of that installation) and return it.

     Args:
-        library_dir: directory where an library is found
+        library_dir: directory where a library is found
     """
     # Given a prefix where an library is found, assuming that prefix
     # contains /lib/ or /lib64/, strip off the 'lib' or 'lib64' directory
@@ -195,13 +218,17 @@ def library_prefix(library_dir):
     return library_dir


-def update_configuration(detected_packages, scope=None, buildable=True):
+def update_configuration(
+    detected_packages: Dict[str, List[DetectedPackage]],
+    scope: Optional[str] = None,
+    buildable: bool = True,
+) -> List[spack.spec.Spec]:
     """Add the packages passed as arguments to packages.yaml

     Args:
-        detected_packages (list): list of DetectedPackage objects to be added
-        scope (str): configuration scope where to add the detected packages
-        buildable (bool): whether the detected packages are buildable or not
+        detected_packages: list of DetectedPackage objects to be added
+        scope: configuration scope where to add the detected packages
+        buildable: whether the detected packages are buildable or not
     """
     predefined_external_specs = _externals_in_packages_yaml()
     pkg_to_cfg, all_new_specs = {}, []
@@ -209,7 +236,10 @@ def update_configuration(detected_packages, scope=None, buildable=True):
         new_entries = [e for e in entries if (e.spec not in predefined_external_specs)]

         pkg_config = _pkg_config_dict(new_entries)
-        all_new_specs.extend([spack.spec.Spec(x["spec"]) for x in pkg_config.get("externals", [])])
+        external_entries = pkg_config.get("externals", [])
+        assert not isinstance(external_entries, bool), "unexpected value for external entry"
+
+        all_new_specs.extend([spack.spec.Spec(x["spec"]) for x in external_entries])
         if buildable is False:
             pkg_config["buildable"] = False
         pkg_to_cfg[package_name] = pkg_config
@@ -222,16 +252,19 @@ def update_configuration(detected_packages, scope=None, buildable=True):
     return all_new_specs


-def _windows_drive():
-    """Return Windows drive string extracted from PROGRAMFILES
-    env var, which is garunteed to be defined for all logins"""
-    drive = re.match(r"([a-zA-Z]:)", os.environ["PROGRAMFILES"]).group(1)
-    return drive
+def _windows_drive() -> str:
+    """Return Windows drive string extracted from the PROGRAMFILES environment variable,
+    which is guaranteed to be defined for all logins.
+    """
+    match = re.match(r"([a-zA-Z]:)", os.environ["PROGRAMFILES"])
+    if match is None:
+        raise RuntimeError("cannot read the PROGRAMFILES environment variable")
+    return match.group(1)

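The assert added to update_configuration() above doubles as a typing idiom: it narrows the Union[bool, List[...]] value returned by _pkg_config_dict() down to the list case so the following iteration satisfies type checkers. A minimal illustration of the same narrowing:

    from typing import Dict, List, Union

    def externals(cfg: Dict[str, Union[bool, List[dict]]]) -> List[dict]:
        entries = cfg.get("externals", [])
        assert not isinstance(entries, bool), "unexpected value for external entry"
        return entries  # statically known to be a list from here on

    assert externals({"externals": [{"spec": "cmake@3.27"}]}) == [{"spec": "cmake@3.27"}]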
 class WindowsCompilerExternalPaths:
     @staticmethod
-    def find_windows_compiler_root_paths():
+    def find_windows_compiler_root_paths() -> List[str]:
         """Helper for Windows compiler installation root discovery

         At the moment simply returns location of VS install paths from VSWhere
@@ -239,7 +272,7 @@ def find_windows_compiler_root_paths():
         return list(winOs.WindowsOs.vs_install_paths)

     @staticmethod
-    def find_windows_compiler_cmake_paths():
+    def find_windows_compiler_cmake_paths() -> List[str]:
         """Semi hard-coded search path for cmake bundled with MSVC"""
         return [
             os.path.join(
@@ -249,7 +282,7 @@ def find_windows_compiler_cmake_paths():
         ]

     @staticmethod
-    def find_windows_compiler_ninja_paths():
+    def find_windows_compiler_ninja_paths() -> List[str]:
         """Semi hard-coded search heuristic for locating ninja bundled with MSVC"""
         return [
             os.path.join(path, "Common7", "IDE", "CommonExtensions", "Microsoft", "CMake", "Ninja")
@@ -257,7 +290,7 @@ def find_windows_compiler_ninja_paths():
         ]

     @staticmethod
-    def find_windows_compiler_bundled_packages():
+    def find_windows_compiler_bundled_packages() -> List[str]:
         """Return all MSVC compiler bundled packages"""
         return (
             WindowsCompilerExternalPaths.find_windows_compiler_cmake_paths()
@@ -266,14 +299,15 @@ def find_windows_compiler_bundled_packages():


 class WindowsKitExternalPaths:
+    plat_major_ver = None
     if sys.platform == "win32":
         plat_major_ver = str(winOs.windows_version()[0])

     @staticmethod
-    def find_windows_kit_roots():
+    def find_windows_kit_roots() -> Optional[str]:
         """Return Windows kit root, typically %programfiles%\\Windows Kits\\10|11\\"""
         if sys.platform != "win32":
-            return []
+            return None
         program_files = os.environ["PROGRAMFILES(x86)"]
         kit_base = os.path.join(
             program_files, "Windows Kits", WindowsKitExternalPaths.plat_major_ver
@@ -281,21 +315,23 @@ def find_windows_kit_roots():
         return kit_base

     @staticmethod
-    def find_windows_kit_bin_paths(kit_base=None):
+    def find_windows_kit_bin_paths(kit_base: Optional[str] = None) -> List[str]:
         """Returns Windows kit bin directory per version"""
         kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
+        assert kit_base is not None, "unexpected value for kit_base"
         kit_bin = os.path.join(kit_base, "bin")
         return glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\"))

     @staticmethod
-    def find_windows_kit_lib_paths(kit_base=None):
+    def find_windows_kit_lib_paths(kit_base: Optional[str] = None) -> List[str]:
         """Returns Windows kit lib directory per version"""
         kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
+        assert kit_base is not None, "unexpected value for kit_base"
         kit_lib = os.path.join(kit_base, "Lib")
         return glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\"))

     @staticmethod
-    def find_windows_driver_development_kit_paths():
+    def find_windows_driver_development_kit_paths() -> List[str]:
         """Provides a list of all installation paths
         for the WDK by version and architecture
         """
@@ -303,7 +339,7 @@ def find_windows_driver_development_kit_paths():
         return WindowsKitExternalPaths.find_windows_kit_lib_paths(wdk_content_root)

     @staticmethod
-    def find_windows_kit_reg_installed_roots_paths():
+    def find_windows_kit_reg_installed_roots_paths() -> List[str]:
         reg = spack.util.windows_registry.WindowsRegistryView(
             "SOFTWARE\\Microsoft\\Windows Kits\\Installed Roots",
             root_key=spack.util.windows_registry.HKEY.HKEY_LOCAL_MACHINE,
@@ -316,7 +352,7 @@ def find_windows_kit_reg_installed_roots_paths():
         )

     @staticmethod
-    def find_windows_kit_reg_sdk_paths():
+    def find_windows_kit_reg_sdk_paths() -> List[str]:
         reg = spack.util.windows_registry.WindowsRegistryView(
             "SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows\\v%s.0"
             % WindowsKitExternalPaths.plat_major_ver,
@@ -330,7 +366,7 @@ def find_windows_kit_reg_sdk_paths():
         )


-def find_win32_additional_install_paths():
+def find_win32_additional_install_paths() -> List[str]:
     """Not all programs on Windows live on the PATH
     Return a list of other potential install locations.
     """
@@ -357,13 +393,12 @@ def find_win32_additional_install_paths():
     return windows_search_ext


-def compute_windows_program_path_for_package(pkg):
-    """Given a package, attempt to compute its Windows
-    program files location, return list of best guesses
+def compute_windows_program_path_for_package(pkg: "spack.package_base.PackageBase") -> List[str]:
+    """Given a package, attempts to compute its Windows program files location,
+    and returns the list of best guesses.

     Args:
-        pkg (spack.package_base.PackageBase): package for which
-            Program Files location is to be computed
+        pkg: package for which Program Files location is to be computed
     """
     if sys.platform != "win32":
         return []
@@ -378,7 +413,7 @@ def compute_windows_program_path_for_package(pkg):
     ]


-def compute_windows_user_path_for_package(pkg):
+def compute_windows_user_path_for_package(pkg: "spack.package_base.PackageBase") -> List[str]:
     """Given a package attempt to compute its user scoped
     install location, return list of potential locations based
     on common heuristics. For more info on Windows user specific
@@ -6,11 +6,13 @@
 and running executables.
 """
 import collections
+import concurrent.futures
 import os
 import os.path
 import re
 import sys
 import warnings
+from typing import Dict, List, Optional, Set, Tuple

 import llnl.util.filesystem
 import llnl.util.tty
@@ -18,7 +20,7 @@
 import spack.util.environment
 import spack.util.ld_so_conf

-from .common import (  # find_windows_compiler_bundled_packages,
+from .common import (
     DetectedPackage,
     WindowsCompilerExternalPaths,
     WindowsKitExternalPaths,
@@ -31,8 +33,13 @@
     path_to_dict,
 )

+#: Timeout used for package detection (seconds)
+DETECTION_TIMEOUT = 60
+if sys.platform == "win32":
+    DETECTION_TIMEOUT = 120
+

-def common_windows_package_paths():
+def common_windows_package_paths() -> List[str]:
     paths = WindowsCompilerExternalPaths.find_windows_compiler_bundled_packages()
     paths.extend(find_win32_additional_install_paths())
     paths.extend(WindowsKitExternalPaths.find_windows_kit_bin_paths())
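A hedged sketch of how a module-level timeout like DETECTION_TIMEOUT is typically combined with the concurrent.futures import added above; the probe function below is a placeholder, not Spack's actual detection call:

    import concurrent.futures

    DETECTION_TIMEOUT = 60

    def detect_one(pkg_name):
        return f"{pkg_name}: detected"  # stand-in for a slow file-system probe

    with concurrent.futures.ThreadPoolExecutor() as executor:
        future = executor.submit(detect_one, "cmake")
        try:
            result = future.result(timeout=DETECTION_TIMEOUT)
        except concurrent.futures.TimeoutError:
            result = None  # give up on a hung probe instead of blocking forever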
@@ -41,7 +48,7 @@ def common_windows_package_paths():
     return paths


-def executables_in_path(path_hints):
+def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
     """Get the paths of all executables available from the current PATH.

     For convenience, this is constructed as a dictionary where the keys are
@@ -52,7 +59,7 @@ def executables_in_path(path_hints):
     assumed there are two different instances of the executable.

     Args:
-        path_hints (list): list of paths to be searched. If None the list will be
+        path_hints: list of paths to be searched. If None the list will be
             constructed based on the PATH environment variable.
     """
     if sys.platform == "win32":
@@ -61,7 +68,9 @@ def executables_in_path(path_hints):
     return path_to_dict(search_paths)


-def libraries_in_ld_and_system_library_path(path_hints=None):
+def libraries_in_ld_and_system_library_path(
+    path_hints: Optional[List[str]] = None,
+) -> Dict[str, str]:
     """Get the paths of all libraries available from LD_LIBRARY_PATH,
     LIBRARY_PATH, DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and
     standard system library paths.
@@ -74,7 +83,7 @@ def libraries_in_ld_and_system_library_path(path_hints=None):
     assumed there are two different instances of the library.

     Args:
-        path_hints (list): list of paths to be searched. If None the list will be
+        path_hints: list of paths to be searched. If None the list will be
             constructed based on the set of LD_LIBRARY_PATH, LIBRARY_PATH,
             DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH environment
             variables as well as the standard system library paths.
@@ -90,7 +99,7 @@ def libraries_in_ld_and_system_library_path(path_hints=None):
|
|||||||
return path_to_dict(search_paths)
|
return path_to_dict(search_paths)
|
||||||
|
|
||||||
|
|
||||||
def libraries_in_windows_paths(path_hints):
|
def libraries_in_windows_paths(path_hints: List[str]) -> Dict[str, str]:
|
||||||
path_hints.extend(spack.util.environment.get_path("PATH"))
|
path_hints.extend(spack.util.environment.get_path("PATH"))
|
||||||
search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
|
search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
|
||||||
# on Windows, some libraries (.dlls) are found in the bin directory or sometimes
|
# on Windows, some libraries (.dlls) are found in the bin directory or sometimes
|
||||||
@@ -106,218 +115,250 @@ def libraries_in_windows_paths(path_hints):
     return path_to_dict(search_paths)


-def _group_by_prefix(paths):
+def _group_by_prefix(paths: Set[str]) -> Dict[str, Set[str]]:
     groups = collections.defaultdict(set)
     for p in paths:
         groups[os.path.dirname(p)].add(p)
-    return groups.items()
+    return groups


-# TODO consolidate this with by_executable
-# Packages should be able to define both .libraries and .executables in the future
-# determine_spec_details should get all relevant libraries and executables in one call
-def by_library(packages_to_check, path_hints=None):
-    # Techniques for finding libraries is determined on a per recipe basis in
-    # the determine_version class method. Some packages will extract the
-    # version number from a shared libraries filename.
-    # Other libraries could use the strings function to extract it as described
-    # in https://unix.stackexchange.com/questions/58846/viewing-linux-library-executable-version-info
-    """Return the list of packages that have been detected on the system,
-    searching by LD_LIBRARY_PATH, LIBRARY_PATH, DYLD_LIBRARY_PATH,
-    DYLD_FALLBACK_LIBRARY_PATH, and standard system library paths.
-
-    Args:
-        packages_to_check (list): list of packages to be detected
-        path_hints (list): list of paths to be searched. If None the list will be
-            constructed based on the LD_LIBRARY_PATH, LIBRARY_PATH,
-            DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH environment variables
-            and standard system library paths.
-    """
-    # If no path hints from command line, intialize to empty list so
-    # we can add default hints on a per package basis
-    path_hints = [] if path_hints is None else path_hints
-
-    lib_pattern_to_pkgs = collections.defaultdict(list)
-    for pkg in packages_to_check:
-        if hasattr(pkg, "libraries"):
-            for lib in pkg.libraries:
-                lib_pattern_to_pkgs[lib].append(pkg)
-        path_hints.extend(compute_windows_user_path_for_package(pkg))
-        path_hints.extend(compute_windows_program_path_for_package(pkg))
-
-    path_to_lib_name = (
-        libraries_in_ld_and_system_library_path(path_hints=path_hints)
-        if sys.platform != "win32"
-        else libraries_in_windows_paths(path_hints)
-    )
-    pkg_to_found_libs = collections.defaultdict(set)
-    for lib_pattern, pkgs in lib_pattern_to_pkgs.items():
-        compiled_re = re.compile(lib_pattern)
-        for path, lib in path_to_lib_name.items():
-            if compiled_re.search(lib):
-                for pkg in pkgs:
-                    pkg_to_found_libs[pkg].add(path)
-
-    pkg_to_entries = collections.defaultdict(list)
-    resolved_specs = {}  # spec -> lib found for the spec
-
-    for pkg, libs in pkg_to_found_libs.items():
+class Finder:
+    """Inspects the file-system looking for packages. Guesses places where to look using PATH."""
+
+    def path_hints(
+        self, *, pkg: "spack.package_base.PackageBase", initial_guess: Optional[List[str]] = None
+    ) -> List[str]:
+        """Returns the list of paths to be searched.
+
+        Args:
+            pkg: package being detected
+            initial_guess: initial list of paths from caller
+        """
+        result = initial_guess or []
+        result.extend(compute_windows_user_path_for_package(pkg))
+        result.extend(compute_windows_program_path_for_package(pkg))
+        return result
+
+    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
+        """Returns the list of patterns used to match candidate files.
+
+        Args:
+            pkg: package being detected
+        """
+        raise NotImplementedError("must be implemented by derived classes")
+
+    def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
+        """Returns a list of candidate files found on the system.
+
+        Args:
+            patterns: search patterns to be used for matching files
+            paths: paths where to search for files
+        """
+        raise NotImplementedError("must be implemented by derived classes")
+
+    def prefix_from_path(self, *, path: str) -> str:
+        """Given a path where a file was found, returns the corresponding prefix.
+
+        Args:
+            path: path of a detected file
+        """
+        raise NotImplementedError("must be implemented by derived classes")
+
+    def detect_specs(
+        self, *, pkg: "spack.package_base.PackageBase", paths: List[str]
+    ) -> List[DetectedPackage]:
+        """Given a list of files matching the search patterns, returns a list of detected specs.
+
+        Args:
+            pkg: package being detected
+            paths: files matching the package search patterns
+        """
         if not hasattr(pkg, "determine_spec_details"):
-            llnl.util.tty.warn(
-                "{0} must define 'determine_spec_details' in order"
-                " for Spack to detect externally-provided instances"
-                " of the package.".format(pkg.name)
+            warnings.warn(
+                f"{pkg.name} must define 'determine_spec_details' in order"
+                f" for Spack to detect externally-provided instances"
+                f" of the package."
             )
-            continue
+            return []

-        for prefix, libs_in_prefix in sorted(_group_by_prefix(libs)):
-            try:
-                specs = _convert_to_iterable(pkg.determine_spec_details(prefix, libs_in_prefix))
-            except Exception as e:
-                specs = []
-                msg = 'error detecting "{0}" from prefix {1} [{2}]'
-                warnings.warn(msg.format(pkg.name, prefix, str(e)))
-
-            if not specs:
-                llnl.util.tty.debug(
-                    "The following libraries in {0} were decidedly not "
-                    "part of the package {1}: {2}".format(
-                        prefix, pkg.name, ", ".join(_convert_to_iterable(libs_in_prefix))
-                    )
-                )
-
-            for spec in specs:
-                pkg_prefix = library_prefix(prefix)
-
-                if not pkg_prefix:
-                    msg = "no lib/ or lib64/ dir found in {0}. Cannot "
-                    "add it as a Spack package"
-                    llnl.util.tty.debug(msg.format(prefix))
-                    continue
-
-                if spec in resolved_specs:
-                    prior_prefix = ", ".join(_convert_to_iterable(resolved_specs[spec]))
-
-                    llnl.util.tty.debug(
-                        "Libraries in {0} and {1} are both associated"
-                        " with the same spec {2}".format(prefix, prior_prefix, str(spec))
-                    )
-                    continue
-                else:
-                    resolved_specs[spec] = prefix
-
-                try:
-                    spec.validate_detection()
-                except Exception as e:
-                    msg = (
-                        '"{0}" has been detected on the system but will '
-                        "not be added to packages.yaml [reason={1}]"
-                    )
-                    llnl.util.tty.warn(msg.format(spec, str(e)))
-                    continue
-
-                if spec.external_path:
-                    pkg_prefix = spec.external_path
-
-                pkg_to_entries[pkg.name].append(DetectedPackage(spec=spec, prefix=pkg_prefix))
-
-    return pkg_to_entries
-
-
-def by_executable(packages_to_check, path_hints=None):
-    """Return the list of packages that have been detected on the system,
-    searching by path.
-
-    Args:
-        packages_to_check (list): list of package classes to be detected
-        path_hints (list): list of paths to be searched. If None the list will be
-            constructed based on the PATH environment variable.
-    """
-    path_hints = spack.util.environment.get_path("PATH") if path_hints is None else path_hints
-    exe_pattern_to_pkgs = collections.defaultdict(list)
-    for pkg in packages_to_check:
-        if hasattr(pkg, "executables"):
-            for exe in pkg.platform_executables():
-                exe_pattern_to_pkgs[exe].append(pkg)
-        # Add Windows specific, package related paths to the search paths
-        path_hints.extend(compute_windows_user_path_for_package(pkg))
-        path_hints.extend(compute_windows_program_path_for_package(pkg))
-
-    path_to_exe_name = executables_in_path(path_hints=path_hints)
-    pkg_to_found_exes = collections.defaultdict(set)
-    for exe_pattern, pkgs in exe_pattern_to_pkgs.items():
-        compiled_re = re.compile(exe_pattern)
-        for path, exe in path_to_exe_name.items():
-            if compiled_re.search(exe):
-                for pkg in pkgs:
-                    pkg_to_found_exes[pkg].add(path)
-
-    pkg_to_entries = collections.defaultdict(list)
-    resolved_specs = {}  # spec -> exe found for the spec
-
-    for pkg, exes in pkg_to_found_exes.items():
-        if not hasattr(pkg, "determine_spec_details"):
-            llnl.util.tty.warn(
-                "{0} must define 'determine_spec_details' in order"
-                " for Spack to detect externally-provided instances"
-                " of the package.".format(pkg.name)
-            )
-            continue
-
-        for prefix, exes_in_prefix in sorted(_group_by_prefix(exes)):
+        result = []
+        for candidate_path, items_in_prefix in sorted(_group_by_prefix(set(paths)).items()):
             # TODO: multiple instances of a package can live in the same
             # prefix, and a package implementation can return multiple specs
             # for one prefix, but without additional details (e.g. about the
             # naming scheme which differentiates them), the spec won't be
             # usable.
             try:
-                specs = _convert_to_iterable(pkg.determine_spec_details(prefix, exes_in_prefix))
+                specs = _convert_to_iterable(
+                    pkg.determine_spec_details(candidate_path, items_in_prefix)
+                )
             except Exception as e:
                 specs = []
-                msg = 'error detecting "{0}" from prefix {1} [{2}]'
-                warnings.warn(msg.format(pkg.name, prefix, str(e)))
-
-            if not specs:
-                llnl.util.tty.debug(
-                    "The following executables in {0} were decidedly not "
-                    "part of the package {1}: {2}".format(
-                        prefix, pkg.name, ", ".join(_convert_to_iterable(exes_in_prefix))
-                    )
+                warnings.warn(
+                    f'error detecting "{pkg.name}" from prefix {candidate_path} [{str(e)}]'
                 )

-            for spec in specs:
-                pkg_prefix = executable_prefix(prefix)
+            if not specs:
+                files = ", ".join(_convert_to_iterable(items_in_prefix))
+                llnl.util.tty.debug(
+                    f"The following files in {candidate_path} were decidedly not "
+                    f"part of the package {pkg.name}: {files}"
+                )

-                if not pkg_prefix:
-                    msg = "no bin/ dir found in {0}. Cannot add it as a Spack package"
-                    llnl.util.tty.debug(msg.format(prefix))
+            resolved_specs: Dict[spack.spec.Spec, str] = {}  # spec -> exe found for the spec
+            for spec in specs:
+                prefix = self.prefix_from_path(path=candidate_path)
+                if not prefix:
                     continue

                 if spec in resolved_specs:
                     prior_prefix = ", ".join(_convert_to_iterable(resolved_specs[spec]))
-
                     llnl.util.tty.debug(
-                        "Executables in {0} and {1} are both associated"
-                        " with the same spec {2}".format(prefix, prior_prefix, str(spec))
+                        f"Files in {candidate_path} and {prior_prefix} are both associated"
+                        f" with the same spec {str(spec)}"
                     )
                     continue
-                else:
-                    resolved_specs[spec] = prefix

+                resolved_specs[spec] = candidate_path
                 try:
                     spec.validate_detection()
                 except Exception as e:
                     msg = (
-                        '"{0}" has been detected on the system but will '
-                        "not be added to packages.yaml [reason={1}]"
+                        f'"{spec}" has been detected on the system but will '
+                        f"not be added to packages.yaml [reason={str(e)}]"
                     )
-                    llnl.util.tty.warn(msg.format(spec, str(e)))
+                    warnings.warn(msg)
                     continue

                 if spec.external_path:
-                    pkg_prefix = spec.external_path
+                    prefix = spec.external_path

-                pkg_to_entries[pkg.name].append(DetectedPackage(spec=spec, prefix=pkg_prefix))
+                result.append(DetectedPackage(spec=spec, prefix=prefix))

-    return pkg_to_entries
+        return result
+
+    def find(
+        self, *, pkg_name: str, initial_guess: Optional[List[str]] = None
+    ) -> List[DetectedPackage]:
+        """For a given package, returns a list of detected specs.
+
+        Args:
+            pkg_name: package being detected
+            initial_guess: initial list of paths to search from the caller
+        """
+        import spack.repo
+
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        patterns = self.search_patterns(pkg=pkg_cls)
+        if not patterns:
+            return []
+        path_hints = self.path_hints(pkg=pkg_cls, initial_guess=initial_guess)
+        candidates = self.candidate_files(patterns=patterns, paths=path_hints)
+        result = self.detect_specs(pkg=pkg_cls, paths=candidates)
+        return result
+
+
+class ExecutablesFinder(Finder):
+    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
+        result = []
+        if hasattr(pkg, "executables") and hasattr(pkg, "platform_executables"):
+            result = pkg.platform_executables()
+        return result
+
+    def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
+        executables_by_path = executables_in_path(path_hints=paths)
+        patterns = [re.compile(x) for x in patterns]
+        result = []
+        for compiled_re in patterns:
+            for path, exe in executables_by_path.items():
+                if compiled_re.search(exe):
+                    result.append(path)
+        return list(sorted(set(result)))
+
+    def prefix_from_path(self, *, path: str) -> str:
+        result = executable_prefix(path)
+        if not result:
+            msg = f"no bin/ dir found in {path}. Cannot add it as a Spack package"
+            llnl.util.tty.debug(msg)
+        return result
+
+
+class LibrariesFinder(Finder):
+    """Finds libraries on the system, searching by LD_LIBRARY_PATH, LIBRARY_PATH,
+    DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and standard system library paths
+    """
+
+    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
+        result = []
+        if hasattr(pkg, "libraries"):
+            result = pkg.libraries
+        return result
+
+    def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
+        libraries_by_path = (
+            libraries_in_ld_and_system_library_path(path_hints=paths)
+            if sys.platform != "win32"
+            else libraries_in_windows_paths(paths)
+        )
+        patterns = [re.compile(x) for x in patterns]
+        result = []
+        for compiled_re in patterns:
+            for path, exe in libraries_by_path.items():
+                if compiled_re.search(exe):
+                    result.append(path)
+        return result
+
+    def prefix_from_path(self, *, path: str) -> str:
+        result = library_prefix(path)
+        if not result:
+            msg = f"no lib/ or lib64/ dir found in {path}. Cannot add it as a Spack package"
+            llnl.util.tty.debug(msg)
+        return result
+
+
+def by_path(
+    packages_to_search: List[str],
+    *,
+    path_hints: Optional[List[str]] = None,
+    max_workers: Optional[int] = None,
+) -> Dict[str, List[DetectedPackage]]:
+    """Return the list of packages that have been detected on the system,
+    searching by path.
+
+    Args:
+        packages_to_search: list of package classes to be detected
+        path_hints: initial list of paths to be searched
+    """
+    # TODO: Packages should be able to define both .libraries and .executables in the future
+    # TODO: determine_spec_details should get all relevant libraries and executables in one call
+    executables_finder, libraries_finder = ExecutablesFinder(), LibrariesFinder()
+
+    executables_path_guess = (
+        spack.util.environment.get_path("PATH") if path_hints is None else path_hints
+    )
+    libraries_path_guess = [] if path_hints is None else path_hints
+    detected_specs_by_package: Dict[str, Tuple[concurrent.futures.Future, ...]] = {}
+
+    result = collections.defaultdict(list)
+    with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
+        for pkg in packages_to_search:
+            executable_future = executor.submit(
+                executables_finder.find, pkg_name=pkg, initial_guess=executables_path_guess
+            )
+            library_future = executor.submit(
+                libraries_finder.find, pkg_name=pkg, initial_guess=libraries_path_guess
+            )
+            detected_specs_by_package[pkg] = executable_future, library_future
+
+        for pkg_name, futures in detected_specs_by_package.items():
+            for future in futures:
+                try:
+                    detected = future.result(timeout=DETECTION_TIMEOUT)
+                    if detected:
+                        result[pkg_name].extend(detected)
+                except Exception:
+                    llnl.util.tty.debug(
+                        f"[EXTERNAL DETECTION] Skipping {pkg_name}: timeout reached"
+                    )
+
+    return result
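The hunk above replaces two near-duplicate functions (`by_library`/`by_executable`) with a template-method base class whose subclasses only say *what* to search for and *how* to map a hit back to a prefix. A rough, stdlib-only sketch of that pattern (the names `MiniFinder` and `PythonInterpreterFinder` are hypothetical, not part of Spack):

```python
# Stdlib-only sketch of the template-method pattern used by Finder above.
import os
import re
from typing import List


class MiniFinder:
    """Base class: subclasses declare what to look for; find() is the template."""

    def search_patterns(self) -> List[str]:
        raise NotImplementedError("must be implemented by derived classes")

    def candidate_files(self, patterns: List[str], paths: List[str]) -> List[str]:
        compiled = [re.compile(p) for p in patterns]
        result = []
        for directory in paths:
            try:
                entries = os.listdir(directory)
            except OSError:
                continue  # unreadable or missing directory: skip it
            for name in entries:
                if any(r.search(name) for r in compiled):
                    result.append(os.path.join(directory, name))
        return sorted(set(result))

    def find(self, paths: List[str]) -> List[str]:
        return self.candidate_files(self.search_patterns(), paths)


class PythonInterpreterFinder(MiniFinder):
    def search_patterns(self) -> List[str]:
        return [r"^python\d?(\.\d+)?$"]


if __name__ == "__main__":
    search = os.environ.get("PATH", "").split(os.pathsep)
    print(PythonInterpreterFinder().find(search))
```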
@@ -42,6 +42,7 @@ class OpenMpi(Package):
 import spack.patch
 import spack.spec
 import spack.url
+import spack.util.crypto
 import spack.variant
 from spack.dependency import Dependency, canonical_deptype, default_deptype
 from spack.fetch_strategy import from_kwargs

@@ -407,10 +408,7 @@ def version(

 def _execute_version(pkg, ver, **kwargs):
     if (
-        any(
-            s in kwargs
-            for s in ("sha256", "sha384", "sha512", "md5", "sha1", "sha224", "checksum")
-        )
+        (any(s in kwargs for s in spack.util.crypto.hashes) or "checksum" in kwargs)
         and hasattr(pkg, "has_code")
         and not pkg.has_code
     ):

@@ -760,7 +758,7 @@ def _execute_variant(pkg):
         when_spec = make_when_spec(when)
         when_specs = [when_spec]

-        if not re.match(spack.spec.identifier_re, name):
+        if not re.match(spack.spec.IDENTIFIER_RE, name):
             directive = "variant"
             msg = "Invalid variant name in {0}: '{1}'"
             raise DirectiveError(directive, msg.format(pkg.name, name))
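The rewritten condition above folds the hard-coded tuple of hash names into `spack.util.crypto.hashes`. A minimal sketch of the same membership test (here `KNOWN_HASHES` merely stands in for that Spack attribute, which is an assumption of this illustration):

```python
# Sketch of the condensed membership test in _execute_version above.
# KNOWN_HASHES stands in for spack.util.crypto.hashes (illustrative only).
KNOWN_HASHES = ("sha256", "sha384", "sha512", "md5", "sha1", "sha224")


def has_checksum_kwargs(kwargs: dict) -> bool:
    # True if any known hash name is passed, or an explicit "checksum" kwarg.
    return any(s in kwargs for s in KNOWN_HASHES) or "checksum" in kwargs


assert has_checksum_kwargs({"sha256": "abc123"})
assert has_checksum_kwargs({"checksum": "abc123"})
assert not has_checksum_kwargs({"url": "https://example.com"})
```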
@@ -11,6 +11,7 @@
 import shutil
 import sys
 from contextlib import contextmanager
+from pathlib import Path

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty

@@ -104,7 +105,7 @@ def relative_path_for_spec(self, spec):

         projection = spack.projections.get_projection(self.projections, spec)
         path = spec.format(projection)
-        return path
+        return str(Path(path))

     def write_spec(self, spec, path):
         """Write a spec out to a file."""
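Wrapping the formatted projection in `pathlib.Path` normalizes separators for the host platform. A small, runnable illustration of what this style of normalization does:

```python
# Why str(Path(path)) helps: it folds separators into the host convention.
from pathlib import PureWindowsPath, PurePosixPath

# On Windows, forward slashes from a projection become backslashes:
assert str(PureWindowsPath("linux-x86_64/gcc-12/zlib-1.2.13")) == (
    "linux-x86_64\\gcc-12\\zlib-1.2.13"
)
# On POSIX the string is left untouched:
assert str(PurePosixPath("linux-x86_64/gcc-12/zlib-1.2.13")) == (
    "linux-x86_64/gcc-12/zlib-1.2.13"
)
```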
@@ -1504,7 +1504,7 @@ def _concretize_separately(self, tests=False):
         start = time.time()
         max_processes = min(
             len(arguments),  # Number of specs
-            spack.config.get("config:build_jobs"),  # Cap on build jobs
+            spack.util.cpus.determine_number_of_jobs(parallel=True),
         )

         # TODO: revisit this print as soon as darwin is parallel too

@@ -1994,14 +1994,10 @@ def get_one_by_hash(self, dag_hash):

     def all_matching_specs(self, *specs: spack.spec.Spec) -> List[Spec]:
         """Returns all concretized specs in the environment satisfying any of the input specs"""
-        # Look up abstract hashes ahead of time, to avoid O(n^2) traversal.
-        specs = [s.lookup_hash() for s in specs]
-
-        # Avoid double lookup by directly calling _satisfies.
         return [
             s
             for s in traverse.traverse_nodes(self.concrete_roots(), key=traverse.by_dag_hash)
-            if any(s._satisfies(t) for t in specs)
+            if any(s.satisfies(t) for t in specs)
         ]

     @spack.repo.autospec

@@ -2062,7 +2058,7 @@ def matching_spec(self, spec):
         # If multiple root specs match, it is assumed that the abstract
         # spec will most-succinctly summarize the difference between them
         # (and the user can enter one of these to disambiguate)
-        fmt_str = "{hash:7} " + spack.spec.default_format
+        fmt_str = "{hash:7} " + spack.spec.DEFAULT_FORMAT
         color = clr.get_color_when()
         match_strings = [
             f"Root spec {abstract.format(color=color)}\n {concrete.format(fmt_str, color=color)}"

@@ -2370,7 +2366,7 @@ def display_specs(concretized_specs):
     def _tree_to_display(spec):
         return spec.tree(
             recurse_dependencies=True,
-            format=spack.spec.display_format,
+            format=spack.spec.DISPLAY_FORMAT,
             status_fn=spack.spec.Spec.install_status,
             hashlen=7,
             hashes=True,

@@ -2448,13 +2444,13 @@ def make_repo_path(root):
 def prepare_config_scope(env):
     """Add env's scope to the global configuration search path."""
     for scope in env.config_scopes():
-        spack.config.config.push_scope(scope)
+        spack.config.CONFIG.push_scope(scope)


 def deactivate_config_scope(env):
     """Remove any scopes from env from the global config path."""
     for scope in env.config_scopes():
-        spack.config.config.remove_scope(scope.name)
+        spack.config.CONFIG.remove_scope(scope.name)


 def manifest_file(env_name_or_dir):

@@ -2668,6 +2664,26 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str]) -> None:
         self.yaml_content = with_defaults_added
         self.changed = False

+    def _all_matches(self, user_spec: str) -> List[str]:
+        """Maps the input string to the first equivalent user spec in the manifest,
+        and returns it.
+
+        Args:
+            user_spec: user spec to be found
+
+        Raises:
+            ValueError: if no equivalent match is found
+        """
+        result = []
+        for yaml_spec_str in self.pristine_configuration["specs"]:
+            if Spec(yaml_spec_str) == Spec(user_spec):
+                result.append(yaml_spec_str)
+
+        if not result:
+            raise ValueError(f"cannot find a spec equivalent to {user_spec}")
+
+        return result
+
     def add_user_spec(self, user_spec: str) -> None:
         """Appends the user spec passed as input to the list of root specs.

@@ -2688,8 +2704,9 @@ def remove_user_spec(self, user_spec: str) -> None:
             SpackEnvironmentError: when the user spec is not in the list
         """
         try:
-            self.pristine_configuration["specs"].remove(user_spec)
-            self.configuration["specs"].remove(user_spec)
+            for key in self._all_matches(user_spec):
+                self.pristine_configuration["specs"].remove(key)
+                self.configuration["specs"].remove(key)
         except ValueError as e:
             msg = f"cannot remove {user_spec} from {self}, no such spec exists"
             raise SpackEnvironmentError(msg) from e
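The new `_all_matches` helper compares manifest entries by spec *equivalence* (`Spec(a) == Spec(b)`) rather than raw string equality, so differently formatted but equal specs can still be removed. A stdlib mimic of that lookup (`normalize` stands in for `Spec` parsing and is purely illustrative):

```python
# Stdlib mimic of _all_matches above: match manifest entries that are
# equivalent to the query, not merely string-equal.
from typing import List


def normalize(spec_str: str) -> str:
    # Toy canonicalization: collapse runs of whitespace.
    return " ".join(spec_str.split())


def all_matches(manifest: List[str], user_spec: str) -> List[str]:
    result = [s for s in manifest if normalize(s) == normalize(user_spec)]
    if not result:
        raise ValueError(f"cannot find a spec equivalent to {user_spec}")
    return result


specs = ["zlib @1.2.13", "hdf5 +mpi"]
assert all_matches(specs, "zlib   @1.2.13") == ["zlib @1.2.13"]
```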
@@ -43,7 +43,7 @@ def activate_header(env, shell, prompt=None):
         # TODO: despacktivate
         # TODO: prompt
     elif shell == "pwsh":
-        cmds += "$Env:SPACK_ENV=%s\n" % env.path
+        cmds += "$Env:SPACK_ENV='%s'\n" % env.path
     else:
         if "color" in os.getenv("TERM", "") and prompt:
             prompt = colorize("@G{%s}" % prompt, color=True, enclose=True)

@@ -82,7 +82,7 @@ def deactivate_header(shell):
         # TODO: despacktivate
         # TODO: prompt
     elif shell == "pwsh":
-        cmds += "Remove-Item Env:SPACK_ENV"
+        cmds += "Set-Item -Path Env:SPACK_ENV\n"
     else:
         cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n"
         cmds += "unset SPACK_ENV; export SPACK_ENV;\n"
@@ -590,9 +590,9 @@ def print_status(self, *specs, **kwargs):
                 print()

             header = "%s{%s} / %s{%s}" % (
-                spack.spec.architecture_color,
+                spack.spec.ARCHITECTURE_COLOR,
                 architecture,
-                spack.spec.compiler_color,
+                spack.spec.COMPILER_COLOR,
                 compiler,
             )
             tty.hline(colorize(header), char="-")
@@ -90,6 +90,16 @@
 STATUS_REMOVED = "removed"


+def _write_timer_json(pkg, timer, cache):
+    extra_attributes = {"name": pkg.name, "cache": cache, "hash": pkg.spec.dag_hash()}
+    try:
+        with open(pkg.times_log_path, "w") as timelog:
+            timer.write_json(timelog, extra_attributes=extra_attributes)
+    except Exception as e:
+        tty.debug(str(e))
+        return
+
+
 class InstallAction:
     #: Don't perform an install
     NONE = 0

@@ -399,6 +409,8 @@ def _install_from_cache(
         return False
     t.stop()
     tty.debug("Successfully extracted {0} from binary cache".format(pkg_id))
+
+    _write_timer_json(pkg, t, True)
     _print_timer(pre=_log_prefix(pkg.name), pkg_id=pkg_id, timer=t)
     _print_installed_pkg(pkg.spec.prefix)
     spack.hooks.post_install(pkg.spec, explicit)

@@ -481,7 +493,7 @@ def _process_binary_cache_tarball(

     with timer.measure("install"), spack.util.path.filter_padding():
         binary_distribution.extract_tarball(
-            pkg.spec, download_result, unsigned=unsigned, force=False
+            pkg.spec, download_result, unsigned=unsigned, force=False, timer=timer
         )

         pkg.installed_from_binary_cache = True

@@ -592,7 +604,9 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
             if node is spec:
                 spack.repo.PATH.dump_provenance(node, dest_pkg_dir)
             elif source_pkg_dir:
-                fs.install_tree(source_pkg_dir, dest_pkg_dir)
+                fs.install_tree(
+                    source_pkg_dir, dest_pkg_dir, allow_broken_symlinks=(sys.platform != "win32")
+                )


 def get_dependent_ids(spec: "spack.spec.Spec") -> List[str]:

@@ -1316,7 +1330,6 @@ def _prepare_for_install(self, task: BuildTask) -> None:
         """
         Check the database and leftover installation directories/files and
         prepare for a new install attempt for an uninstalled package.
-
         Preparation includes cleaning up installation and stage directories
         and ensuring the database is up-to-date.

@@ -2092,7 +2105,6 @@ def install(self) -> None:
                 # another process has a write lock so must be (un)installing
                 # the spec (or that process is hung).
                 ltype, lock = self._ensure_locked("read", pkg)
-
                 # Requeue the spec if we cannot get at least a read lock so we
                 # can check the status presumably established by another process
                 # -- failed, installed, or uninstalled -- on the next pass.

@@ -2372,8 +2384,7 @@ def run(self) -> bool:

         # Stop the timer and save results
         self.timer.stop()
-        with open(self.pkg.times_log_path, "w") as timelog:
-            self.timer.write_json(timelog)
+        _write_timer_json(self.pkg, self.timer, False)

         print_install_test_log(self.pkg)
         _print_timer(pre=self.pre, pkg_id=self.pkg_id, timer=self.timer)

@@ -2394,7 +2405,9 @@ def _install_source(self) -> None:
         src_target = os.path.join(pkg.spec.prefix, "share", pkg.name, "src")
         tty.debug("{0} Copying source to {1}".format(self.pre, src_target))

-        fs.install_tree(pkg.stage.source_path, src_target)
+        fs.install_tree(
+            pkg.stage.source_path, src_target, allow_broken_symlinks=(sys.platform != "win32")
+        )

     def _real_install(self) -> None:
         import spack.builder
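The new `_write_timer_json` helper serializes the phase timer together with a few extra attributes, and deliberately swallows I/O errors behind a debug message. A stdlib-only sketch of that serialization pattern (the `Timer` class below is a stand-in for Spack's timer, not its real implementation):

```python
# Sketch of the timer-serialization pattern in _write_timer_json above.
import io
import json
import time


class Timer:
    def __init__(self):
        self.phases = {}

    def measure(self, name):
        timer = self

        class _Ctx:
            def __enter__(self):
                self.t0 = time.time()

            def __exit__(self, *exc):
                timer.phases[name] = time.time() - self.t0

        return _Ctx()

    def write_json(self, stream, extra_attributes=None):
        # Merge caller-supplied attributes with the recorded phase timings.
        payload = dict(extra_attributes or {})
        payload["phases"] = self.phases
        json.dump(payload, stream, indent=2)


t = Timer()
with t.measure("install"):
    time.sleep(0.01)
buf = io.StringIO()
t.write_json(buf, extra_attributes={"name": "zlib", "cache": True})
print(buf.getvalue())
```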
@@ -51,7 +51,7 @@
 stat_names = pstats.Stats.sort_arg_dict_default

 #: top-level aliases for Spack commands
-aliases = {"rm": "remove"}
+aliases = {"concretise": "concretize", "containerise": "containerize", "rm": "remove"}

 #: help levels in order of detail (i.e., number of commands shown)
 levels = ["short", "long"]

@@ -602,10 +602,10 @@ def setup_main_options(args):

         key = syaml.syaml_str("repos")
         key.override = True
-        spack.config.config.scopes["command_line"].sections["repos"] = syaml.syaml_dict(
+        spack.config.CONFIG.scopes["command_line"].sections["repos"] = syaml.syaml_dict(
             [(key, [spack.paths.mock_packages_path])]
         )
-        spack.repo.PATH = spack.repo.create(spack.config.config)
+        spack.repo.PATH = spack.repo.create(spack.config.CONFIG)

     # If the user asked for it, don't check ssl certs.
     if args.insecure:

@@ -930,7 +930,7 @@ def _main(argv=None):

     # make spack.config aware of any command line configuration scopes
     if args.config_scopes:
-        spack.config.command_line_scopes = args.config_scopes
+        spack.config.COMMAND_LINE_SCOPES = args.config_scopes

     # ensure options on spack command come before everything
     setup_main_options(args)
@@ -178,7 +178,7 @@ def merge_config_rules(configuration, spec):
         if spec.satisfies(constraint):
             if hasattr(constraint, "override") and constraint.override:
                 spec_configuration = {}
-            update_dictionary_extending_lists(spec_configuration, action)
+            update_dictionary_extending_lists(spec_configuration, copy.deepcopy(action))

     # Transform keywords for dependencies or prerequisites into a list of spec

@@ -833,7 +833,7 @@ def ensure_modules_are_enabled_or_warn():
         return

     # Check if we have custom TCL module sections
-    for scope in spack.config.config.file_scopes:
+    for scope in spack.config.CONFIG.file_scopes:
         # Skip default configuration
         if scope.name.startswith("default"):
             continue
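The `copy.deepcopy(action)` added above matters because `update_dictionary_extending_lists` mutates the lists it merges in; without the copy, a later merge can observe (and further grow) list objects shared with the configuration. A minimal, self-contained reproduction of that aliasing bug (the function and dict contents here are illustrative):

```python
# Why the deepcopy in merge_config_rules matters: merging without a copy
# stores the caller's list object in the target, so later appends leak back.
import copy


def update_extending_lists(target: dict, update: dict) -> None:
    for key, value in update.items():
        if key in target and isinstance(value, list):
            target[key].extend(value)
        else:
            target[key] = value  # stores the *same* list object


action = {"environment": ["PATH"]}

merged_a: dict = {}
update_extending_lists(merged_a, action)
merged_a["environment"].append("LD_LIBRARY_PATH")
assert action["environment"] == ["PATH", "LD_LIBRARY_PATH"]  # caller mutated!

action = {"environment": ["PATH"]}
merged_b: dict = {}
update_extending_lists(merged_b, copy.deepcopy(action))  # as in the diff above
merged_b["environment"].append("LD_LIBRARY_PATH")
assert action["environment"] == ["PATH"]  # caller's data intact
```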
@@ -236,7 +236,7 @@ def install(self, prefix):

         # Create a multimethod with this name if there is not one already
         original_method = MultiMethodMeta._locals.get(method.__name__)
-        if not type(original_method) == SpecMultiMethod:
+        if not isinstance(original_method, SpecMultiMethod):
             original_method = SpecMultiMethod(original_method)

         if self.spec is not None:
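`isinstance` is the idiomatic check here: an exact `type(...) ==` comparison rejects subclasses, while `isinstance` accepts them (and handles `None` the same way). In isolation:

```python
# Why isinstance beats type(...) ==: it accepts subclasses.
class SpecMultiMethod:
    pass


class SubclassedMultiMethod(SpecMultiMethod):
    pass


m = SubclassedMultiMethod()
assert not type(m) == SpecMultiMethod  # exact-type check rejects the subclass
assert isinstance(m, SpecMultiMethod)  # isinstance accepts it
```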
@@ -96,6 +96,7 @@
     on_package_attributes,
 )
 from spack.spec import InvalidSpecDetected, Spec
+from spack.util.cpus import determine_number_of_jobs
 from spack.util.executable import *
 from spack.variant import (
     any_combination_of,
@@ -180,6 +180,8 @@ class DetectablePackageMeta:
     for the detection function.
     """

+    TAG = "detectable"
+
     def __init__(cls, name, bases, attr_dict):
         if hasattr(cls, "executables") and hasattr(cls, "libraries"):
             msg = "a package can have either an 'executables' or 'libraries' attribute"

@@ -195,6 +197,11 @@ def __init__(cls, name, bases, attr_dict):
         # If a package has the executables or libraries attribute then it's
         # assumed to be detectable
         if hasattr(cls, "executables") or hasattr(cls, "libraries"):
+            # Append a tag to each detectable package, so that finding them is faster
+            if hasattr(cls, "tags"):
+                getattr(cls, "tags").append(DetectablePackageMeta.TAG)
+            else:
+                setattr(cls, "tags", [DetectablePackageMeta.TAG])
+
             @classmethod
             def platform_executables(cls):
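The tagging added above lets detectable packages be located through a fast tag index instead of scanning every class. A toy metaclass showing the same stamping trick (class names below are invented for the example):

```python
# Sketch of the tagging trick in DetectablePackageMeta above: the metaclass
# stamps detectable classes with a tag at class-creation time.
class DetectableMeta(type):
    TAG = "detectable"

    def __init__(cls, name, bases, attr_dict):
        if hasattr(cls, "executables") or hasattr(cls, "libraries"):
            if hasattr(cls, "tags"):
                cls.tags.append(DetectableMeta.TAG)
            else:
                cls.tags = [DetectableMeta.TAG]
        super().__init__(name, bases, attr_dict)


class CMakePkg(metaclass=DetectableMeta):
    executables = ["^cmake$"]


class DocsOnlyPkg(metaclass=DetectableMeta):
    pass


assert "detectable" in CMakePkg.tags  # stamped automatically
assert not hasattr(DocsOnlyPkg, "tags")  # nothing to detect, nothing stamped
```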
@@ -288,9 +288,6 @@ def next_spec(
                 )
                 raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)

-            if root_spec.concrete:
-                raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))
-
             root_spec._add_dependency(dependency, deptypes=(), virtuals=())

         else:

@@ -306,13 +303,12 @@ def all_specs(self) -> List[spack.spec.Spec]:
 class SpecNodeParser:
     """Parse a single spec node from a stream of tokens"""

-    __slots__ = "ctx", "has_compiler", "has_version", "has_hash"
+    __slots__ = "ctx", "has_compiler", "has_version"

     def __init__(self, ctx):
         self.ctx = ctx
         self.has_compiler = False
         self.has_version = False
-        self.has_hash = False

     def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spack.spec.Spec]:
         """Parse a single spec node from a stream of tokens

@@ -343,7 +339,6 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac

         while True:
             if self.ctx.accept(TokenType.COMPILER):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 if self.has_compiler:
                     raise spack.spec.DuplicateCompilerSpecError(
                         f"{initial_spec} cannot have multiple compilers"

@@ -353,7 +348,6 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
                 initial_spec.compiler = spack.spec.CompilerSpec(compiler_name.strip(), ":")
                 self.has_compiler = True
             elif self.ctx.accept(TokenType.COMPILER_AND_VERSION):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 if self.has_compiler:
                     raise spack.spec.DuplicateCompilerSpecError(
                         f"{initial_spec} cannot have multiple compilers"

@@ -367,7 +361,6 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
             elif self.ctx.accept(TokenType.VERSION) or self.ctx.accept(
                 TokenType.VERSION_HASH_PAIR
             ):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 if self.has_version:
                     raise spack.spec.MultipleVersionError(
                         f"{initial_spec} cannot have multiple versions"

@@ -378,25 +371,21 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
                 initial_spec.attach_git_version_lookup()
                 self.has_version = True
             elif self.ctx.accept(TokenType.BOOL_VARIANT):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 variant_value = self.ctx.current_token.value[0] == "+"
                 initial_spec._add_flag(
                     self.ctx.current_token.value[1:].strip(), variant_value, propagate=False
                 )
             elif self.ctx.accept(TokenType.PROPAGATED_BOOL_VARIANT):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 variant_value = self.ctx.current_token.value[0:2] == "++"
                 initial_spec._add_flag(
                     self.ctx.current_token.value[2:].strip(), variant_value, propagate=True
                 )
             elif self.ctx.accept(TokenType.KEY_VALUE_PAIR):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 name, value = self.ctx.current_token.value.split("=", maxsplit=1)
                 name = name.strip("'\" ")
                 value = value.strip("'\" ")
                 initial_spec._add_flag(name, value, propagate=False)
             elif self.ctx.accept(TokenType.PROPAGATED_KEY_VALUE_PAIR):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 name, value = self.ctx.current_token.value.split("==", maxsplit=1)
                 name = name.strip("'\" ")
                 value = value.strip("'\" ")

@@ -411,12 +400,6 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac

         return initial_spec

-    def hash_not_parsed_or_raise(self, spec, addition):
-        if not self.has_hash:
-            return
-
-        raise spack.spec.RedundantSpecError(spec, addition)
-

 class FileParser:
     """Parse a single spec from a JSON or YAML file"""
@@ -10,11 +10,12 @@
 dependencies.
 """
 import os
+from pathlib import PurePath

 import llnl.util.filesystem

 #: This file lives in $prefix/lib/spack/spack/__file__
-prefix = llnl.util.filesystem.ancestor(__file__, 4)
+prefix = str(PurePath(llnl.util.filesystem.ancestor(__file__, 4)))

 #: synonym for prefix
 spack_root = prefix

@@ -88,7 +89,7 @@ def _get_user_cache_path():
     return os.path.expanduser(os.getenv("SPACK_USER_CACHE_PATH") or "~%s.spack" % os.sep)


-user_cache_path = _get_user_cache_path()
+user_cache_path = str(PurePath(_get_user_cache_path()))

 #: junit, cdash, etc. reports about builds
 reports_path = os.path.join(user_cache_path, "reports")
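`PurePath` performs purely lexical normalization: it never touches the filesystem and never resolves `..`, which is what makes it safe to evaluate at import time as `paths.py` does here. For instance:

```python
# PurePath is string-level only: no os.stat calls and no ".." resolution.
from pathlib import PurePath

p = PurePath("lib", "spack", "..", "spack")
# The ".." survives; only separators are adapted to the host platform.
assert str(p).replace("\\", "/") == "lib/spack/../spack"
```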
@@ -64,7 +64,7 @@ def use_platform(new_platform):
         host = _PickleableCallable(new_platform)

         # Clear configuration and compiler caches
-        spack.config.config.clear_caches()
+        spack.config.CONFIG.clear_caches()
         spack.compilers._cache_config_files = []

         yield new_platform

@@ -73,5 +73,5 @@ def use_platform(new_platform):
         host = original_host_fn

         # Clear configuration and compiler caches
-        spack.config.config.clear_caches()
+        spack.config.CONFIG.clear_caches()
         spack.compilers._cache_config_files = []
@@ -139,6 +139,8 @@ def craype_type_and_version(cls):
         # If no default version, sort available versions and return latest
         versions_available = [spack.version.Version(v) for v in os.listdir(craype_dir)]
         versions_available.sort(reverse=True)
+        if not versions_available:
+            return (craype_type, None)
         return (craype_type, versions_available[0])

     @classmethod
@@ -387,7 +387,7 @@ def _create_new_cache(self) -> Dict[str, os.stat_result]:

             # Warn about invalid names that look like packages.
             if not nm.valid_module_name(pkg_name):
-                if not pkg_name.startswith("."):
+                if not pkg_name.startswith(".") and pkg_name != "repo.yaml":
                     tty.warn(
                         'Skipping package at {0}. "{1}" is not '
                         "a valid Spack module name.".format(pkg_dir, pkg_name)

@@ -647,7 +647,7 @@ class RepoPath:
     """

     def __init__(self, *repos, **kwargs):
-        cache = kwargs.get("cache", spack.caches.misc_cache)
+        cache = kwargs.get("cache", spack.caches.MISC_CACHE)
         self.repos = []
         self.by_namespace = nm.NamespaceTrie()

@@ -966,7 +966,7 @@ def check(condition, msg):

         # Indexes for this repository, computed lazily
         self._repo_index = None
-        self._cache = cache or spack.caches.misc_cache
+        self._cache = cache or spack.caches.MISC_CACHE

     def real_name(self, import_name):
         """Allow users to import Spack packages using Python identifiers.

@@ -1357,7 +1357,7 @@ def create_or_construct(path, namespace=None):

 def _path(configuration=None):
     """Get the singleton RepoPath instance for Spack."""
-    configuration = configuration or spack.config.config
+    configuration = configuration or spack.config.CONFIG
     return create(configuration=configuration)


@@ -1404,14 +1404,14 @@ def use_repositories(*paths_and_repos, **kwargs):
     paths = [getattr(x, "root", x) for x in paths_and_repos]
     scope_name = "use-repo-{}".format(uuid.uuid4())
     repos_key = "repos:" if override else "repos"
-    spack.config.config.push_scope(
+    spack.config.CONFIG.push_scope(
         spack.config.InternalConfigScope(name=scope_name, data={repos_key: paths})
     )
-    PATH, saved = create(configuration=spack.config.config), PATH
+    PATH, saved = create(configuration=spack.config.CONFIG), PATH
     try:
         yield PATH
     finally:
-        spack.config.config.remove_scope(scope_name=scope_name)
+        spack.config.CONFIG.remove_scope(scope_name=scope_name)
         PATH = saved
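`use_repositories` pushes a uniquely named scope and removes it in a `finally` block, so the global configuration is restored even if the body raises. The same push/pop discipline in a stdlib-only context manager (all names here are illustrative):

```python
# The push/try/finally/pop discipline used by use_repositories above.
import contextlib
import uuid

SCOPES: dict = {}


@contextlib.contextmanager
def use_scope(data):
    name = f"use-repo-{uuid.uuid4()}"
    SCOPES[name] = data  # push_scope
    try:
        yield name
    finally:
        SCOPES.pop(name, None)  # remove_scope, even if the body raised


with use_scope({"repos": ["/tmp/my-repo"]}) as scope:
    assert scope in SCOPES
assert not SCOPES  # global state restored on exit
```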
@@ -28,6 +28,12 @@
         "unify": {
             "oneOf": [{"type": "boolean"}, {"type": "string", "enum": ["when_possible"]}]
         },
+        "duplicates": {
+            "type": "object",
+            "properties": {
+                "strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
+            },
+        },
     },
 }
}
File diff suppressed because it is too large
159
lib/spack/spack/solver/counter.py
Normal file
159
lib/spack/spack/solver/counter.py
Normal file
@@ -0,0 +1,159 @@
|
|||||||
|
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||||
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
import collections
|
||||||
|
from typing import List, Set, Tuple
|
||||||
|
|
||||||
|
import spack.dependency
|
||||||
|
import spack.package_base
|
||||||
|
|
||||||
|
PossibleDependencies = Set[str]
|
||||||
|
|
||||||
|
|
||||||
|
class Counter:
|
||||||
|
"""Computes the possible packages and the maximum number of duplicates
|
||||||
|
allowed for each of them.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
        specs: abstract specs to concretize
        tests: if True, add test dependencies to the list of possible packages
    """

    def __init__(self, specs: List["spack.spec.Spec"], tests: bool) -> None:
        self.specs = specs

        self.link_run_types: Tuple[str, ...] = ("link", "run", "test")
        self.all_types: Tuple[str, ...] = spack.dependency.all_deptypes
        if not tests:
            self.link_run_types = ("link", "run")
            self.all_types = ("link", "run", "build")

        self._possible_dependencies: PossibleDependencies = set()
        self._possible_virtuals: Set[str] = set(x.name for x in specs if x.virtual)

    def possible_dependencies(self) -> PossibleDependencies:
        """Returns the list of possible dependencies"""
        self.ensure_cache_values()
        return self._possible_dependencies

    def possible_virtuals(self) -> Set[str]:
        """Returns the list of possible virtuals"""
        self.ensure_cache_values()
        return self._possible_virtuals

    def ensure_cache_values(self) -> None:
        """Ensure the cache values have been computed"""
        if self._possible_dependencies:
            return
        self._compute_cache_values()

    def possible_packages_facts(self, gen: "spack.solver.asp.PyclingoDriver", fn) -> None:
        """Emit facts associated with the possible packages"""
        raise NotImplementedError("must be implemented by derived classes")

    def _compute_cache_values(self):
        raise NotImplementedError("must be implemented by derived classes")


class NoDuplicatesCounter(Counter):
    def _compute_cache_values(self):
        result = spack.package_base.possible_dependencies(
            *self.specs, virtuals=self._possible_virtuals, deptype=self.all_types
        )
        self._possible_dependencies = set(result)

    def possible_packages_facts(self, gen, fn):
        gen.h2("Maximum number of nodes (packages)")
        for package_name in sorted(self.possible_dependencies()):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()
        gen.h2("Maximum number of nodes (virtual packages)")
        for package_name in sorted(self.possible_virtuals()):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()
        gen.h2("Possible package in link-run subDAG")
        for name in sorted(self.possible_dependencies()):
            gen.fact(fn.possible_in_link_run(name))
        gen.newline()


class MinimalDuplicatesCounter(NoDuplicatesCounter):
    def __init__(self, specs, tests):
        super().__init__(specs, tests)
        self._link_run: PossibleDependencies = set()
        self._direct_build: PossibleDependencies = set()
        self._total_build: PossibleDependencies = set()
        self._link_run_virtuals: Set[str] = set()

    def _compute_cache_values(self):
        self._link_run = set(
            spack.package_base.possible_dependencies(
                *self.specs, virtuals=self._possible_virtuals, deptype=self.link_run_types
            )
        )
        self._link_run_virtuals.update(self._possible_virtuals)
        for x in self._link_run:
            current = spack.repo.PATH.get_pkg_class(x).dependencies_of_type("build")
            self._direct_build.update(current)

        self._total_build = set(
            spack.package_base.possible_dependencies(
                *self._direct_build, virtuals=self._possible_virtuals, deptype=self.all_types
            )
        )
        self._possible_dependencies = set(self._link_run) | set(self._total_build)

    def possible_packages_facts(self, gen, fn):
        build_tools = set(spack.repo.PATH.packages_with_tags("build-tools"))
        gen.h2("Packages with at most a single node")
        for package_name in sorted(self.possible_dependencies() - build_tools):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()

        gen.h2("Packages with multiple possible nodes (build-tools)")
        for package_name in sorted(self.possible_dependencies() & build_tools):
            gen.fact(fn.max_dupes(package_name, 2))
            gen.fact(fn.multiple_unification_sets(package_name))
        gen.newline()

        gen.h2("Maximum number of nodes (virtual packages)")
        for package_name in sorted(self.possible_virtuals()):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()

        gen.h2("Possible package in link-run subDAG")
        for name in sorted(self._link_run):
            gen.fact(fn.possible_in_link_run(name))
        gen.newline()


class FullDuplicatesCounter(MinimalDuplicatesCounter):
    def possible_packages_facts(self, gen, fn):
        build_tools = set(spack.repo.PATH.packages_with_tags("build-tools"))
        counter = collections.Counter(
            list(self._link_run) + list(self._total_build) + list(self._direct_build)
        )
        gen.h2("Maximum number of nodes")
        for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
            count = min(count, 2)
            gen.fact(fn.max_dupes(pkg, count))
        gen.newline()

        gen.h2("Build unification sets")
        for name in sorted(self.possible_dependencies() & build_tools):
            gen.fact(fn.multiple_unification_sets(name))
        gen.newline()

        gen.h2("Possible package in link-run subDAG")
        for name in sorted(self._link_run):
            gen.fact(fn.possible_in_link_run(name))
        gen.newline()

        counter = collections.Counter(
            list(self._link_run_virtuals) + list(self._possible_virtuals)
        )
        gen.h2("Maximum number of virtual nodes")
        for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
            gen.fact(fn.max_dupes(pkg, count))
        gen.newline()
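The three counters trade precision for grounding cost: NoDuplicatesCounter caps every package at one node, MinimalDuplicatesCounter allows a second node only for build tools, and FullDuplicatesCounter bounds each package by how often it appears across the link/run and build subDAGs. A minimal sketch of how a counter might be chosen and driven; the strategy names, DummyGen, and DummyFn are hypothetical stand-ins for the real PyclingoDriver and fact builder, not Spack's actual wiring:

class DummyGen:
    """Stand-in for the ASP program generator: prints instead of buffering."""

    def h2(self, title):
        print("%% " + title)

    def fact(self, body):
        print(body + ".")

    def newline(self):
        print()


class DummyFn:
    """Stand-in for the fact builder: fn.max_dupes("cmake", 2) -> 'max_dupes("cmake",2)'."""

    def __getattr__(self, name):
        def build(*args):
            rendered = ",".join('"%s"' % a if isinstance(a, str) else str(a) for a in args)
            return "%s(%s)" % (name, rendered)

        return build


def create_counter(strategy, specs, tests):
    # Hypothetical dispatch on a duplicates-strategy name
    if strategy == "full":
        return FullDuplicatesCounter(specs, tests)
    if strategy == "minimal":
        return MinimalDuplicatesCounter(specs, tests)
    return NoDuplicatesCounter(specs, tests)


# counter = create_counter("minimal", specs, tests=False)
# counter.possible_packages_facts(DummyGen(), DummyFn())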
lib/spack/spack/solver/cycle_detection.lp (new file, 21 lines)
@@ -0,0 +1,21 @@
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

%=============================================================================
% Avoid cycles in the DAG
%
% Some combinations of conditional dependencies can result in cycles;
% this ensures that we solve around them. Note that these rules are quite
% demanding on both grounding and solving, since they need to compute and
% consider all possible paths between pairs of nodes.
%=============================================================================


#program no_cycle.
path(Parent, Child) :- depends_on(Parent, Child).
path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant).
:- path(A, A).

#defined depends_on/2.
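The two path/2 rules compute the transitive closure of depends_on/2, and the integrity constraint rejects any model in which a node can reach itself. A small Python analogue of the same fixpoint computation (illustration only, not Spack code):

def has_cycle(depends_on):
    """depends_on: set of (parent, child) edges."""
    path = set(depends_on)                 # path(P, C) :- depends_on(P, C).
    changed = True
    while changed:                         # iterate to a fixpoint, like grounding
        changed = False
        for parent, mid in list(path):
            for a, child in depends_on:    # path(P, D) :- path(P, A), depends_on(A, D).
                if mid == a and (parent, child) not in path:
                    path.add((parent, child))
                    changed = True
    return any(a == b for a, b in path)    # ":- path(A, A)." would reject this model

print(has_cycle({("a", "b"), ("b", "c"), ("c", "a")}))  # True
print(has_cycle({("a", "b"), ("b", "c")}))              # False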
lib/spack/spack/solver/heuristic.lp (new file, 29 lines)
@@ -0,0 +1,29 @@
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

%=============================================================================
% Heuristic to speed up solves (node with ID 0)
%=============================================================================


%-----------------
% Domain heuristic
%-----------------
#heuristic attr("hash", node(0, Package), Hash) : literal(_, "root", Package). [45, init]
#heuristic attr("root", node(0, Package)) : literal(_, "root", Package). [45, true]
#heuristic attr("node", node(0, Package)) : literal(_, "root", Package). [45, true]
#heuristic attr("node", node(0, Package)) : literal(_, "node", Package). [45, true]

% Root node
#heuristic attr("version", node(0, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true]
#heuristic version_weight(node(0, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true]
#heuristic attr("variant_value", node(0, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("root", node(0, Package)). [35, true]
#heuristic attr("node_target", node(0, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("root", node(0, Package)). [35, true]
#heuristic node_target_weight(node(0, Package), 0) : attr("root", node(0, Package)). [35, true]
#heuristic node_compiler(node(0, Package), CompilerID) : default_compiler_preference(ID, 0), compiler_id(ID), attr("root", node(0, Package)). [35, true]

% Providers
#heuristic attr("node", node(0, Package)) : default_provider_preference(Virtual, Package, 0), possible_in_link_run(Package). [30, true]
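These are clingo domain heuristics: they bias which atoms the solver tries first, and with what truth value, without changing the set of answer sets. Note that clasp honors #heuristic directives only when its Domain heuristic is enabled. A standalone sketch with the clingo Python API (illustration only, assuming the clingo package is installed; this is not Spack's driver code):

import clingo

PROGRAM = """
1 { a; b } 1.
#heuristic a. [1, true]
"""

# "--heuristic=Domain" is required for #heuristic directives to take effect
ctl = clingo.Control(["--heuristic=Domain"])
ctl.add("base", [], PROGRAM)
ctl.ground([("base", [])])
ctl.solve(on_model=lambda model: print(model))  # the first model tried contains "a"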
lib/spack/spack/solver/heuristic_separate.lp (new file, 24 lines)
@@ -0,0 +1,24 @@
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

%=============================================================================
% Heuristic to speed up solves (node with ID > 0)
%=============================================================================

% node(ID, _)
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic version_weight(node(ID, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]

% node(ID, _), split build dependencies
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic version_weight(node(ID, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
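In the first block the priority term 25-5*ID decays linearly with node ID: ID 1 gets priority 20, ID 2 gets 15, and so on, so the guidance is weaker for nodes farther from the root. The second block keeps a flat priority of 25 for packages in multiple unification sets, so duplicate build-tool nodes receive the same strength of guidance as the first copy.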
@@ -3,9 +3,11 @@
 %
 % SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
+%=============================================================================
 % OS compatibility rules for reusing solves.
 % os_compatible(RecentOS, OlderOS)
 % OlderOS binaries can be used on RecentOS
+%=============================================================================
 
 % macOS
 os_compatible("monterey", "bigsur").
lib/spack/spack/solver/when_possible.lp (new file, 27 lines)
@@ -0,0 +1,27 @@
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

%=============================================================================
% Minimize the number of literals that are not solved
%
% This minimization is used for the "when_possible" concretization mode;
% otherwise we assume that all literals must be solved.
%=============================================================================

% Give clingo the choice to solve an input spec or not
{ solve_literal(ID) } :- literal(ID).
literal_not_solved(ID) :- not solve_literal(ID), literal(ID).

% Make a problem with "zero literals solved" unsat. This is to trigger
% looking for solutions to the ASP problem with "errors", which results
% in better reporting for users. See #30669 for details.
1 { solve_literal(ID) : literal(ID) }.

opt_criterion(300, "number of input specs not concretized").
#minimize{ 0@300: #true }.
#minimize { 1@300,ID : literal_not_solved(ID) }.

#heuristic literal_solved(ID) : literal(ID). [1, sign]
#heuristic literal_solved(ID) : literal(ID). [50, init]
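This is a standard "maximize what can be solved" encoding: a choice rule lets each input literal be solved or not, a cardinality constraint forbids the degenerate all-unsolved model, and the #minimize statement makes every unsolved literal cost 1 at priority 300. A toy reproduction with the clingo Python API (illustration only; the conflict rule is invented for the example):

import clingo

PROGRAM = """
literal(0..2).
{ solve_literal(ID) } :- literal(ID).
literal_not_solved(ID) :- not solve_literal(ID), literal(ID).
1 { solve_literal(ID) : literal(ID) }.
:- solve_literal(1), solve_literal(2).  % toy conflict between two inputs
#minimize { 1@300,ID : literal_not_solved(ID) }.
"""

ctl = clingo.Control()
ctl.add("base", [], PROGRAM)
ctl.ground([("base", [])])
ctl.solve(on_model=lambda m: print(m, "cost:", m.cost))
# The optimal models solve literal 0 plus one of {1, 2}, at cost [1].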
@@ -112,50 +112,49 @@
     "UnsatisfiableDependencySpecError",
     "AmbiguousHashError",
     "InvalidHashError",
-    "RedundantSpecError",
     "SpecDeprecatedError",
 ]
 
 #: Valid pattern for an identifier in Spack
-identifier_re = r"\w[\w-]*"
+IDENTIFIER_RE = r"\w[\w-]*"
 
-compiler_color = "@g"  #: color for highlighting compilers
-version_color = "@c"  #: color for highlighting versions
-architecture_color = "@m"  #: color for highlighting architectures
-enabled_variant_color = "@B"  #: color for highlighting enabled variants
-disabled_variant_color = "r"  #: color for highlighting disabled variants
-dependency_color = "@."  #: color for highlighting dependencies
-hash_color = "@K"  #: color for highlighting package hashes
+COMPILER_COLOR = "@g"  #: color for highlighting compilers
+VERSION_COLOR = "@c"  #: color for highlighting versions
+ARCHITECTURE_COLOR = "@m"  #: color for highlighting architectures
+ENABLED_VARIANT_COLOR = "@B"  #: color for highlighting enabled variants
+DISABLED_VARIANT_COLOR = "r"  #: color for highlighting disabled variants
+DEPENDENCY_COLOR = "@."  #: color for highlighting dependencies
+HASH_COLOR = "@K"  #: color for highlighting package hashes
 
 #: This map determines the coloring of specs when using color output.
 #: We make the fields different colors to enhance readability.
 #: See llnl.util.tty.color for descriptions of the color codes.
-color_formats = {
-    "%": compiler_color,
-    "@": version_color,
-    "=": architecture_color,
-    "+": enabled_variant_color,
-    "~": disabled_variant_color,
-    "^": dependency_color,
-    "#": hash_color,
+COLOR_FORMATS = {
+    "%": COMPILER_COLOR,
+    "@": VERSION_COLOR,
+    "=": ARCHITECTURE_COLOR,
+    "+": ENABLED_VARIANT_COLOR,
+    "~": DISABLED_VARIANT_COLOR,
+    "^": DEPENDENCY_COLOR,
+    "#": HASH_COLOR,
 }
 
 #: Regex used for splitting by spec field separators.
 #: These need to be escaped to avoid metacharacters in
-#: ``color_formats.keys()``.
-_separators = "[\\%s]" % "\\".join(color_formats.keys())
+#: ``COLOR_FORMATS.keys()``.
+_SEPARATORS = "[\\%s]" % "\\".join(COLOR_FORMATS.keys())
 
 #: Default format for Spec.format(). This format can be round-tripped, so that:
 #: Spec(Spec("string").format()) == Spec("string")
-default_format = (
+DEFAULT_FORMAT = (
     "{name}{@versions}"
     "{%compiler.name}{@compiler.versions}{compiler_flags}"
     "{variants}{arch=architecture}{/abstract_hash}"
 )
 
 #: Display format, which eliminates extra `@=` in the output, for readability.
-display_format = (
+DISPLAY_FORMAT = (
     "{name}{@version}"
     "{%compiler.name}{@compiler.version}{compiler_flags}"
     "{variants}{arch=architecture}{/abstract_hash}"
@@ -187,7 +186,7 @@ class InstallStatus(enum.Enum):
 
 def colorize_spec(spec):
     """Returns a spec colorized according to the colors specified in
-    color_formats."""
+    COLOR_FORMATS."""
 
     class insert_color:
         def __init__(self):
@@ -200,9 +199,9 @@ def __call__(self, match):
             return clr.cescape(sep)
         self.last = sep
 
-        return "%s%s" % (color_formats[sep], clr.cescape(sep))
+        return "%s%s" % (COLOR_FORMATS[sep], clr.cescape(sep))
 
-    return clr.colorize(re.sub(_separators, insert_color(), str(spec)) + "@.")
+    return clr.colorize(re.sub(_SEPARATORS, insert_color(), str(spec)) + "@.")
 
 
 @lang.lazy_lexicographic_ordering
@@ -985,16 +984,14 @@ def __iter__(self):
     def __len__(self):
         return len(self.edges)
 
-    def add(self, edge):
-        """Adds a new edge to this object.
-
-        Args:
-            edge (DependencySpec): edge to be added
-        """
+    def add(self, edge: DependencySpec):
         key = edge.spec.name if self.store_by_child else edge.parent.name
-        current_list = self.edges.setdefault(key, [])
-        current_list.append(edge)
-        current_list.sort(key=_sort_by_dep_types)
+        if key in self.edges:
+            lst = self.edges[key]
+            lst.append(edge)
+            lst.sort(key=_sort_by_dep_types)
+        else:
+            self.edges[key] = [edge]
 
     def __str__(self):
         return "{deps: %s}" % ", ".join(str(d) for d in sorted(self.values()))
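The rewrite replaces the unconditional setdefault/append/sort with an explicit branch, so inserting the first edge for a key skips the sort entirely. A sketch of the same idea on plain data (illustration only):

edges = {}

def add_edge(key, edge):
    if key in edges:
        lst = edges[key]
        lst.append(edge)
        lst.sort()           # keep the per-key edge list ordered
    else:
        edges[key] = [edge]  # a fresh one-element list is already sorted

add_edge("zlib", "link")
add_edge("zlib", "build")
print(edges)  # {'zlib': ['build', 'link']}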
@@ -1927,19 +1924,15 @@ def _lookup_hash(self):
         store, or finally, binary caches."""
         import spack.environment
 
-        matches = []
         active_env = spack.environment.active_environment()
 
-        if active_env:
-            env_matches = active_env.get_by_hash(self.abstract_hash) or []
-            matches = [m for m in env_matches if m._satisfies(self)]
-        if not matches:
-            db_matches = spack.store.STORE.db.get_by_hash(self.abstract_hash) or []
-            matches = [m for m in db_matches if m._satisfies(self)]
-        if not matches:
-            query = spack.binary_distribution.BinaryCacheQuery(True)
-            remote_matches = query("/" + self.abstract_hash) or []
-            matches = [m for m in remote_matches if m._satisfies(self)]
+        # First env, then store, then binary cache
+        matches = (
+            (active_env.all_matching_specs(self) if active_env else [])
+            or spack.store.STORE.db.query(self, installed=any)
+            or spack.binary_distribution.BinaryCacheQuery(True)(self)
+        )
+
         if not matches:
             raise InvalidHashError(self, self.abstract_hash)
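The rewritten lookup relies on Python's short-circuiting "or": each candidate source is queried only if every earlier one returned an empty result. A minimal sketch of the same pattern (illustration only, hypothetical source functions):

def from_env():
    return []                  # e.g. no active environment

def from_store():
    return ["match-from-db"]

def from_binary_cache():
    raise AssertionError("never reached: the store already matched")

matches = from_env() or from_store() or from_binary_cache()
print(matches)  # ['match-from-db'] -- later sources are never queried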
@@ -1960,19 +1953,17 @@ def lookup_hash(self):
         spec = self.copy(deps=False)
         # root spec is replaced
         if spec.abstract_hash:
-            new = self._lookup_hash()
-            spec._dup(new)
+            spec._dup(self._lookup_hash())
             return spec
 
         # Get dependencies that need to be replaced
         for node in self.traverse(root=False):
             if node.abstract_hash:
-                new = node._lookup_hash()
-                spec._add_dependency(new, deptypes=(), virtuals=())
+                spec._add_dependency(node._lookup_hash(), deptypes=(), virtuals=())
 
         # reattach nodes that were not otherwise satisfied by new dependencies
         for node in self.traverse(root=False):
-            if not any(n._satisfies(node) for n in spec.traverse()):
+            if not any(n.satisfies(node) for n in spec.traverse()):
                 spec._add_dependency(node.copy(), deptypes=(), virtuals=())
 
         return spec
@@ -1985,9 +1976,7 @@ def replace_hash(self):
         if not any(node for node in self.traverse(order="post") if node.abstract_hash):
             return
 
-        spec_by_hash = self.lookup_hash()
-
-        self._dup(spec_by_hash)
+        self._dup(self.lookup_hash())
 
     def to_node_dict(self, hash=ht.dag_hash):
         """Create a dictionary representing the state of this Spec.
@@ -2983,9 +2972,12 @@ def _new_concretize(self, tests=False):
             providers = [spec.name for spec in answer.values() if spec.package.provides(name)]
             name = providers[0]
 
-        assert name in answer
-
-        concretized = answer[name]
+        node = spack.solver.asp.SpecBuilder.make_node(pkg=name)
+        assert (
+            node in answer
+        ), f"cannot find {name} in the list of specs {','.join([n.pkg for n in answer.keys()])}"
+
+        concretized = answer[node]
         self._dup(concretized)
 
     def concretize(self, tests=False):
@@ -3519,7 +3511,8 @@ def update_variant_validate(self, variant_name, values):
         for value in values:
             if self.variants.get(variant_name):
                 msg = (
-                    "Cannot append a value to a single-valued " "variant with an already set value"
+                    f"cannot append the new value '{value}' to the single-valued "
+                    f"variant '{self.variants[variant_name]}'"
                 )
                 assert pkg_variant.multi, msg
             self.variants[variant_name].append(value)
@@ -3719,15 +3712,19 @@ def intersects(self, other: "Spec", deps: bool = True) -> bool:
         """
         other = self._autospec(other)
 
-        lhs = self.lookup_hash() or self
-        rhs = other.lookup_hash() or other
-
-        return lhs._intersects(rhs, deps)
-
-    def _intersects(self, other: "Spec", deps: bool = True) -> bool:
         if other.concrete and self.concrete:
             return self.dag_hash() == other.dag_hash()
 
+        self_hash = self.dag_hash() if self.concrete else self.abstract_hash
+        other_hash = other.dag_hash() if other.concrete else other.abstract_hash
+
+        if (
+            self_hash
+            and other_hash
+            and not (self_hash.startswith(other_hash) or other_hash.startswith(self_hash))
+        ):
+            return False
+
         # If the names are different, we need to consider virtuals
         if self.name != other.name and self.name and other.name:
             if self.virtual and other.virtual:
|
|||||||
# If we need to descend into dependencies, do it, otherwise we're done.
|
# If we need to descend into dependencies, do it, otherwise we're done.
|
||||||
if deps:
|
if deps:
|
||||||
return self._intersects_dependencies(other)
|
return self._intersects_dependencies(other)
|
||||||
else:
|
|
||||||
return True
|
|
||||||
|
|
||||||
def satisfies(self, other, deps=True):
|
return True
|
||||||
"""
|
|
||||||
This checks constraints on common dependencies against each other.
|
|
||||||
"""
|
|
||||||
other = self._autospec(other)
|
|
||||||
|
|
||||||
lhs = self.lookup_hash() or self
|
|
||||||
rhs = other.lookup_hash() or other
|
|
||||||
|
|
||||||
return lhs._satisfies(rhs, deps=deps)
|
|
||||||
|
|
||||||
def _intersects_dependencies(self, other):
|
def _intersects_dependencies(self, other):
|
||||||
if not other._dependencies or not self._dependencies:
|
if not other._dependencies or not self._dependencies:
|
||||||
@@ -3836,7 +3822,7 @@ def _intersects_dependencies(self, other):
 
         return True
 
-    def _satisfies(self, other: "Spec", deps: bool = True) -> bool:
+    def satisfies(self, other: "Spec", deps: bool = True) -> bool:
         """Return True if all concrete specs matching self also match other, otherwise False.
 
         Args:
@@ -3851,6 +3837,13 @@ def _satisfies(self, other: "Spec", deps: bool = True) -> bool:
             # objects.
             return self.concrete and self.dag_hash() == other.dag_hash()
 
+        # If the right-hand side has an abstract hash, make sure it's a prefix of the
+        # left-hand side's (abstract) hash.
+        if other.abstract_hash:
+            compare_hash = self.dag_hash() if self.concrete else self.abstract_hash
+            if not compare_hash or not compare_hash.startswith(other.abstract_hash):
+                return False
+
         # If the names are different, we need to consider virtuals
         if self.name != other.name and self.name and other.name:
             # A concrete provider can satisfy a virtual dependency.
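Both checks reduce to string-prefix tests on DAG hashes: intersects asks whether either hash could extend to the other, while satisfies requires the left-hand side's hash to start with the right-hand side's abstract hash. A small illustration (not Spack code; the hash strings are made up):

def may_intersect(self_hash, other_hash):
    # Either side unconstrained, or one hash is a prefix of the other
    if not self_hash or not other_hash:
        return True
    return self_hash.startswith(other_hash) or other_hash.startswith(self_hash)

def hash_satisfies(compare_hash, abstract_hash):
    # The LHS hash must extend the RHS abstract hash
    return bool(compare_hash) and compare_hash.startswith(abstract_hash)

print(may_intersect("abc123", "abc"))   # True  -- "abc" could extend to "abc123"
print(may_intersect("abc123", "abd"))   # False -- diverging prefixes
print(hash_satisfies("abc123", "abc"))  # True
print(hash_satisfies("abc", "abc123"))  # False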
@@ -4227,9 +4220,7 @@ def eq_node(self, other):
     def _cmp_iter(self):
         """Lazily yield components of self for comparison."""
-
-        cmp_spec = self.lookup_hash() or self
-
-        for item in cmp_spec._cmp_node():
+        for item in self._cmp_node():
             yield item
 
         # This needs to be in _cmp_iter so that no specs with different process hashes
@@ -4240,10 +4231,10 @@ def _cmp_iter(self):
         # TODO: they exist for speed. We should benchmark whether it's really worth
         # TODO: having two types of hashing now that we use `json` instead of `yaml` for
         # TODO: spec hashing.
-        yield cmp_spec.process_hash() if cmp_spec.concrete else None
+        yield self.process_hash() if self.concrete else None
 
         def deps():
-            for dep in sorted(itertools.chain.from_iterable(cmp_spec._dependencies.values())):
+            for dep in sorted(itertools.chain.from_iterable(self._dependencies.values())):
                 yield dep.spec.name
                 yield tuple(sorted(dep.deptypes))
                 yield hash(dep.spec)
@@ -4253,7 +4244,7 @@ def deps():
     def colorized(self):
         return colorize_spec(self)
 
-    def format(self, format_string=default_format, **kwargs):
+    def format(self, format_string=DEFAULT_FORMAT, **kwargs):
         r"""Prints out particular pieces of a spec, depending on what is
         in the format string.
@@ -4332,7 +4323,7 @@ def format(self, format_string=default_format, **kwargs):
         def write(s, c=None):
             f = clr.cescape(s)
             if c is not None:
-                f = color_formats[c] + f + "@."
+                f = COLOR_FORMATS[c] + f + "@."
             clr.cwrite(f, stream=out, color=color)
 
         def write_attribute(spec, attribute, color):
@@ -4531,7 +4522,7 @@ def tree(self, **kwargs):
         status_fn = kwargs.pop("status_fn", False)
         cover = kwargs.pop("cover", "nodes")
         indent = kwargs.pop("indent", 0)
-        fmt = kwargs.pop("format", default_format)
+        fmt = kwargs.pop("format", DEFAULT_FORMAT)
         prefix = kwargs.pop("prefix", None)
         show_types = kwargs.pop("show_types", False)
         deptypes = kwargs.pop("deptypes", "all")
@@ -5339,14 +5330,6 @@ class NoSuchSpecFileError(SpecFilenameError):
     """Raised when a spec file doesn't exist."""
 
 
-class RedundantSpecError(spack.error.SpecError):
-    def __init__(self, spec, addition):
-        super().__init__(
-            "Attempting to add %s to spec %s which is already concrete."
-            " This is likely the result of adding to a spec specified by hash." % (addition, spec)
-        )
-
-
 class SpecFormatStringError(spack.error.SpecError):
     """Called for errors in Spec format strings."""
@@ -97,8 +97,10 @@ def remove(self, spec):
             msg += "Either %s is not in %s or %s is " % (spec, self.name, spec)
             msg += "expanded from a matrix and cannot be removed directly."
             raise SpecListError(msg)
-        assert len(remove) == 1
-        self.yaml_list.remove(remove[0])
+
+        # Remove may contain more than one string representation of the same spec
+        for item in remove:
+            self.yaml_list.remove(item)
 
         # invalidate cache variables when we change the list
         self._expanded_list = None
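The old code asserted that exactly one YAML entry matched, which breaks when the same spec appears under several string spellings. Removing every matching entry sidesteps that. A small illustration of the failure mode (hypothetical spellings, not Spack code):

yaml_list = ["zlib @1.2.13", "zlib@1.2.13", "cmake"]
remove = [s for s in yaml_list if s.replace(" ", "") == "zlib@1.2.13"]

# "assert len(remove) == 1" would fail here: both spellings match
for item in remove:
    yaml_list.remove(item)
print(yaml_list)  # ['cmake']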
@@ -197,7 +199,9 @@ def _expand_matrix_constraints(matrix_config):
     for combo in itertools.product(*expanded_rows):
         # Construct a combined spec to test against excludes
         flat_combo = [constraint for constraint_list in combo for constraint in constraint_list]
-        flat_combo = [Spec(x) for x in flat_combo]
+
+        # Resolve abstract hashes so we can exclude by their concrete properties
+        flat_combo = [Spec(x).lookup_hash() for x in flat_combo]
 
         test_spec = flat_combo[0].copy()
         for constraint in flat_combo[1:]:
@@ -484,7 +484,7 @@ def fetch(self, mirror_only=False, err_msg=None):
 
         if self.default_fetcher.cachable:
             for rel_path in reversed(list(self.mirror_paths)):
-                cache_fetcher = spack.caches.fetch_cache.fetcher(
+                cache_fetcher = spack.caches.FETCH_CACHE.fetcher(
                     rel_path, digest, expand=expand, extension=extension
                 )
                 fetchers.insert(0, cache_fetcher)
@@ -577,7 +577,7 @@ def check(self):
         self.fetcher.check()
 
     def cache_local(self):
-        spack.caches.fetch_cache.store(self.fetcher, self.mirror_paths.storage_path)
+        spack.caches.FETCH_CACHE.store(self.fetcher, self.mirror_paths.storage_path)
 
     def cache_mirror(self, mirror, stats):
         """Perform a fetch if the resource is not already cached
@@ -212,7 +212,7 @@ def create(configuration: ConfigurationType) -> Store:
     Args:
         configuration: configuration to create a store.
     """
-    configuration = configuration or spack.config.config
+    configuration = configuration or spack.config.CONFIG
    config_dict = configuration.get("config")
    root, unpadded_root, projections = parse_install_tree(config_dict)
    hash_length = configuration.get("config:install_hash_length")
@@ -234,7 +234,7 @@ def create(configuration: ConfigurationType) -> Store:
 
 
 def _create_global() -> Store:
-    result = create(configuration=spack.config.config)
+    result = create(configuration=spack.config.CONFIG)
     return result
@@ -372,10 +372,10 @@ def use_store(
 
     # Swap the store with the one just constructed and return it
     ensure_singleton_created()
-    spack.config.config.push_scope(
+    spack.config.CONFIG.push_scope(
         spack.config.InternalConfigScope(name=scope_name, data={"config": {"install_tree": data}})
     )
-    temporary_store = create(configuration=spack.config.config)
+    temporary_store = create(configuration=spack.config.CONFIG)
     original_store, STORE = STORE, temporary_store
 
     try:
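use_store follows a push/try/finally-pop discipline: an internal scope carrying the temporary install tree is pushed onto the global configuration, and the finally block guarantees it is removed even if the body raises. The same pattern in miniature (toy classes, not spack.config's API):

import contextlib

class ScopedConfig:
    """Toy configuration with named scopes; later scopes win on lookup."""

    def __init__(self):
        self.scopes = []  # list of (name, dict)

    def push_scope(self, name, data):
        self.scopes.append((name, data))

    def remove_scope(self, name):
        self.scopes = [(n, d) for (n, d) in self.scopes if n != name]

    def get(self, key, default=None):
        for _, data in reversed(self.scopes):
            if key in data:
                return data[key]
        return default

@contextlib.contextmanager
def temporary_scope(config, name, data):
    config.push_scope(name, data)
    try:
        yield config
    finally:
        config.remove_scope(name)  # restored even if the body raises

cfg = ScopedConfig()
cfg.push_scope("defaults", {"install_tree": "/opt/spack"})
with temporary_scope(cfg, "tmp", {"install_tree": "/tmp/store"}):
    print(cfg.get("install_tree"))  # /tmp/store
print(cfg.get("install_tree"))      # /opt/spack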
@@ -383,7 +383,7 @@ def use_store(
     finally:
         # Restore the original store
         STORE = original_store
-        spack.config.config.remove_scope(scope_name=scope_name)
+        spack.config.CONFIG.remove_scope(scope_name=scope_name)
 
 
 class MatchError(spack.error.SpackError):
@@ -94,14 +94,14 @@ class TestState:
 
     def __init__(self):
         if _SERIALIZE:
-            self.config = spack.config.config
+            self.config = spack.config.CONFIG
             self.platform = spack.platforms.host
             self.test_patches = store_patches()
             self.store = spack.store.STORE
 
     def restore(self):
         if _SERIALIZE:
-            spack.config.config = self.config
+            spack.config.CONFIG = self.config
             spack.repo.PATH = spack.repo.create(self.config)
             spack.platforms.host = self.platform
             spack.store.STORE = self.store
@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
 import platform
+import sys
 
 import pytest
 
@@ -199,15 +200,11 @@ def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constra
     ],
 )
 @pytest.mark.usefixtures("mock_packages", "config")
+@pytest.mark.only_clingo("Fixing the parser broke this test for the original concretizer.")
 def test_concretize_target_ranges(root_target_range, dep_target_range, result, monkeypatch):
     # Monkeypatch so that all concretization is done as if the machine is core2
     monkeypatch.setattr(spack.platforms.test.Test, "default", "core2")
 
-    # use foobar=bar to make the problem simpler for the old concretizer
-    # the new concretizer should not need that help
-    if spack.config.get("config:concretizer") == "original":
-        pytest.skip("Fixing the parser broke this test for the original concretizer.")
-
     spec_str = "a %%gcc@10 foobar=bar target=%s ^b target=%s" % (
         root_target_range,
         dep_target_range,
@@ -227,6 +224,7 @@ def test_concretize_target_ranges(root_target_range, dep_target_range, result, m
         (["21.11", "21.9"], None, False),
     ],
 )
+@pytest.mark.skipif(sys.platform == "win32", reason="Cray does not use windows")
 def test_cray_platform_detection(versions, default, expected, tmpdir, monkeypatch, working_env):
     ex_path = str(tmpdir.join("fake_craype_dir"))
     fs.mkdirp(ex_path)
@@ -37,7 +37,7 @@
 from spack.paths import test_path
 from spack.spec import Spec
 
-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")
 
 mirror_cmd = spack.main.SpackCommand("mirror")
 install_cmd = spack.main.SpackCommand("install")
@@ -51,7 +51,7 @@
 def cache_directory(tmpdir):
     fetch_cache_dir = tmpdir.ensure("fetch_cache", dir=True)
     fsc = spack.fetch_strategy.FsCache(str(fetch_cache_dir))
-    spack.config.caches, old_cache_path = fsc, spack.caches.fetch_cache
+    spack.config.caches, old_cache_path = fsc, spack.caches.FETCH_CACHE
 
     yield spack.config.caches
@@ -115,8 +115,8 @@ def default_config(tmpdir, config_directory, monkeypatch, install_mockery_mutabl
         ]
     )
 
-    spack.config.config, old_config = cfg, spack.config.config
-    spack.config.config.set("repos", [spack.paths.mock_packages_path])
+    spack.config.CONFIG, old_config = cfg, spack.config.CONFIG
+    spack.config.CONFIG.set("repos", [spack.paths.mock_packages_path])
     njobs = spack.config.get("config:build_jobs")
     if not njobs:
         spack.config.set("config:build_jobs", 4, scope="user")
@@ -138,9 +138,9 @@ def default_config(tmpdir, config_directory, monkeypatch, install_mockery_mutabl
     if not timeout:
         spack.config.set("config:connect_timeout", 10, scope="user")
 
-    yield spack.config.config
+    yield spack.config.CONFIG
 
-    spack.config.config = old_config
+    spack.config.CONFIG = old_config
     mutable_dir.remove()
@@ -26,11 +26,11 @@ def test_store_is_restored_correctly_after_bootstrap(mutable_config, tmpdir):
     user_path = str(tmpdir.join("store"))
     with spack.store.use_store(user_path):
         assert spack.store.STORE.root == user_path
-        assert spack.config.config.get("config:install_tree:root") == user_path
+        assert spack.config.CONFIG.get("config:install_tree:root") == user_path
         with spack.bootstrap.ensure_bootstrap_configuration():
             assert spack.store.STORE.root == spack.bootstrap.config.store_path()
         assert spack.store.STORE.root == user_path
-        assert spack.config.config.get("config:install_tree:root") == user_path
+        assert spack.config.CONFIG.get("config:install_tree:root") == user_path
 
 
 @pytest.mark.regression("38963")
@@ -40,11 +40,11 @@ def test_store_padding_length_is_zero_during_bootstrapping(mutable_config, tmpdi
     """
     user_path = str(tmpdir.join("store"))
     with spack.store.use_store(user_path, extra_data={"padded_length": 512}):
-        assert spack.config.config.get("config:install_tree:padded_length") == 512
+        assert spack.config.CONFIG.get("config:install_tree:padded_length") == 512
         with spack.bootstrap.ensure_bootstrap_configuration():
             assert spack.store.STORE.root == spack.bootstrap.config.store_path()
-            assert spack.config.config.get("config:install_tree:padded_length") == 0
-        assert spack.config.config.get("config:install_tree:padded_length") == 512
+            assert spack.config.CONFIG.get("config:install_tree:padded_length") == 0
+        assert spack.config.CONFIG.get("config:install_tree:padded_length") == 512
 
 
 @pytest.mark.regression("38963")
@@ -54,15 +54,15 @@ def test_install_tree_customization_is_respected(mutable_config, tmp_path):
     """
     spack.store.reinitialize()
     store_dir = tmp_path / "store"
-    spack.config.config.set("config:install_tree:root", str(store_dir))
+    spack.config.CONFIG.set("config:install_tree:root", str(store_dir))
     with spack.bootstrap.ensure_bootstrap_configuration():
         assert spack.store.STORE.root == spack.bootstrap.config.store_path()
         assert (
-            spack.config.config.get("config:install_tree:root")
+            spack.config.CONFIG.get("config:install_tree:root")
             == spack.bootstrap.config.store_path()
         )
-        assert spack.config.config.get("config:install_tree:padded_length") == 0
-    assert spack.config.config.get("config:install_tree:root") == str(store_dir)
+        assert spack.config.CONFIG.get("config:install_tree:padded_length") == 0
+    assert spack.config.CONFIG.get("config:install_tree:root") == str(store_dir)
     assert spack.store.STORE.root == str(store_dir)
@@ -185,12 +185,12 @@ def test_bootstrap_custom_store_in_environment(mutable_config, tmpdir):
 
 def test_nested_use_of_context_manager(mutable_config):
     """Test nested use of the context manager"""
-    user_config = spack.config.config
+    user_config = spack.config.CONFIG
     with spack.bootstrap.ensure_bootstrap_configuration():
-        assert spack.config.config != user_config
+        assert spack.config.CONFIG != user_config
         with spack.bootstrap.ensure_bootstrap_configuration():
-            assert spack.config.config != user_config
-    assert spack.config.config == user_config
+            assert spack.config.CONFIG != user_config
+    assert spack.config.CONFIG == user_config
 
 
 @pytest.mark.parametrize("expected_missing", [False, True])
@@ -5,7 +5,6 @@
 
 import os
 import os.path
-import sys
 
 import pytest
 
@@ -16,7 +15,7 @@
 
 install = spack.main.SpackCommand("install")
 
-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")
 
 
 def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmpdir):
@@ -6,7 +6,6 @@
 import os
 import platform
 import posixpath
-import sys
 
 import pytest
 
@@ -17,8 +16,9 @@
 import spack.package_base
 import spack.spec
 import spack.util.spack_yaml as syaml
-from spack.build_environment import _static_to_shared_library, determine_number_of_jobs, dso_suffix
+from spack.build_environment import _static_to_shared_library, dso_suffix
 from spack.paths import build_env_path
+from spack.util.cpus import determine_number_of_jobs
 from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable
 from spack.util.path import Path, convert_to_platform_path
@@ -119,7 +119,7 @@ def __call__(self, *args, **kwargs):
     return mock_module_cmd
 
 
-@pytest.mark.skipif(sys.platform == "win32", reason="Static to Shared not supported on Win (yet)")
+@pytest.mark.not_on_windows("Static to Shared not supported on Win (yet)")
 def test_static_to_shared_library(build_environment):
     os.environ["SPACK_TEST_COMMAND"] = "dump-args"
@@ -443,7 +443,7 @@ def test_parallel_false_is_not_propagating(default_mock_concretization):
 
     spack.build_environment.set_module_variables_for_package(s["b"].package)
     assert s["b"].package.module.make_jobs == spack.build_environment.determine_number_of_jobs(
-        s["b"].package.parallel
+        parallel=s["b"].package.parallel
     )
@@ -475,28 +475,62 @@ def test_setting_dtags_based_on_config(config_setting, expected_flag, config, mo
 
 def test_build_jobs_sequential_is_sequential():
     assert (
-        determine_number_of_jobs(parallel=False, command_line=8, config_default=8, max_cpus=8) == 1
+        determine_number_of_jobs(
+            parallel=False,
+            max_cpus=8,
+            config=spack.config.Configuration(
+                spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 8}}),
+                spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 8}}),
+            ),
+        )
+        == 1
     )
 
 
 def test_build_jobs_command_line_overrides():
     assert (
-        determine_number_of_jobs(parallel=True, command_line=10, config_default=1, max_cpus=1)
+        determine_number_of_jobs(
+            parallel=True,
+            max_cpus=1,
+            config=spack.config.Configuration(
+                spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}),
+                spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}}),
+            ),
+        )
         == 10
     )
     assert (
-        determine_number_of_jobs(parallel=True, command_line=10, config_default=100, max_cpus=100)
+        determine_number_of_jobs(
+            parallel=True,
+            max_cpus=100,
+            config=spack.config.Configuration(
+                spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}),
+                spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 100}}),
+            ),
+        )
         == 10
     )
 
 
 def test_build_jobs_defaults():
     assert (
-        determine_number_of_jobs(parallel=True, command_line=None, config_default=1, max_cpus=10)
+        determine_number_of_jobs(
+            parallel=True,
+            max_cpus=10,
+            config=spack.config.Configuration(
+                spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}})
+            ),
+        )
         == 1
     )
     assert (
-        determine_number_of_jobs(parallel=True, command_line=None, config_default=100, max_cpus=10)
+        determine_number_of_jobs(
+            parallel=True,
+            max_cpus=10,
+            config=spack.config.Configuration(
+                spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 100}})
+            ),
+        )
         == 10
     )
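After the refactor, determine_number_of_jobs no longer takes pre-split command_line/config_default integers; it reads build_jobs from a Configuration whose scopes already encode precedence, and the expected values above imply that an explicit command-line setting wins even over max_cpus, while defaults are clamped to it. A toy model consistent with those expectations (illustration only, not the real function's implementation):

def determine_jobs(parallel, max_cpus, command_line=None, default=1):
    if not parallel:
        return 1
    if command_line is not None:
        return command_line           # explicit request wins, even over max_cpus
    return min(default, max_cpus)     # defaults are clamped to available CPUs

print(determine_jobs(False, max_cpus=8, command_line=8, default=8))  # 1
print(determine_jobs(True, max_cpus=1, command_line=10))             # 10
print(determine_jobs(True, max_cpus=10, default=100))                # 10
print(determine_jobs(True, max_cpus=10, default=1))                  # 1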
@@ -5,7 +5,6 @@
 
 import glob
 import os
-import sys
 
 import py.path
 import pytest
@@ -43,7 +42,7 @@ def _func(dir_str):
     return _func
 
 
-@pytest.mark.skipif(sys.platform == "win32", reason="make not available on Windows")
+@pytest.mark.not_on_windows("make not available on Windows")
 @pytest.mark.usefixtures("config", "mock_packages", "working_env")
 class TestTargets:
     @pytest.mark.parametrize(
@@ -92,7 +91,7 @@ def test_negative_ninja_check(self, input_dir, test_dir, concretize_and_setup):
             s.package._if_ninja_target_execute("check")
 
 
-@pytest.mark.skipif(sys.platform == "win32", reason="autotools not available on windows")
+@pytest.mark.not_on_windows("autotools not available on windows")
 @pytest.mark.usefixtures("config", "mock_packages")
 class TestAutotoolsPackage:
     def test_with_or_without(self, default_mock_concretization):
@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os.path
-import sys
 
 import pytest
 
@@ -106,10 +105,7 @@ def test_old_style_compatibility_with_super(spec_str, method_name, expected):
     assert value == expected
 
 
-@pytest.mark.skipif(
-    sys.platform == "win32",
-    reason="log_output cannot currently be used outside of subprocess on Windows",
-)
+@pytest.mark.not_on_windows("log_output cannot currently be used outside of subprocess on Windows")
 @pytest.mark.regression("33928")
 @pytest.mark.usefixtures("builder_test_repository", "config", "working_env")
 @pytest.mark.disable_clean_stage_check
@@ -153,7 +149,7 @@ def test_monkey_patching_test_log_file():
 
 # Windows context manager's __exit__ fails with ValueError ("I/O operation
 # on closed file").
-@pytest.mark.skipif(sys.platform == "win32", reason="Does not run on windows")
+@pytest.mark.not_on_windows("Does not run on windows")
 def test_install_time_test_callback(tmpdir, config, mock_packages, mock_stage):
     """Confirm able to run stand-alone test as a post-install callback."""
     s = spack.spec.Spec("py-test-callback").concretized()
@@ -31,13 +31,16 @@ def test_fetch_missing_cache(tmpdir, _fetch_method):
 @pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
 def test_fetch(tmpdir, _fetch_method):
     """Ensure a fetch after expanding is effectively a no-op."""
-    testpath = str(tmpdir)
-    cache = os.path.join(testpath, "cache.tar.gz")
+    cache_dir = tmpdir.join("cache")
+    stage_dir = tmpdir.join("stage")
+    mkdirp(cache_dir)
+    mkdirp(stage_dir)
+    cache = os.path.join(cache_dir, "cache.tar.gz")
     touch(cache)
     url = url_util.path_to_file_url(cache)
     with spack.config.override("config:url_fetch_method", _fetch_method):
         fetcher = CacheURLFetchStrategy(url=url)
-        with Stage(fetcher, path=testpath) as stage:
+        with Stage(fetcher, path=str(stage_dir)) as stage:
             source_path = stage.source_path
             mkdirp(source_path)
             fetcher.fetch()
@@ -8,7 +8,6 @@
 arguments correctly.
 """
 import os
-import sys
 
 import pytest
 
@@ -145,7 +144,7 @@
     + test_args_without_paths
 )
 
-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")
 
 
 @pytest.fixture(scope="function")
Some files were not shown because too many files have changed in this diff.