# Compare commits

Comparing `develop-20` ... `paged-outp` (340 commits)
*Commit list omitted: 340 commits, from `81a1f97779` through `2f4c5f2aa2`; the author, message, and date columns did not survive capture.*
**`.flake8`** (2 changes)

```diff
@@ -28,7 +28,7 @@ max-line-length = 99
 # - F821: undefined name `name`
 #
 per-file-ignores =
-    var/spack/repos/*/package.py:F403,F405,F821
+    var/spack/*/package.py:F403,F405,F821
     *-ci-package.py:F403,F405,F821

 # exclude things we usually do not want linting for.
```
**`.gitattributes`** (vendored, 3 changes)

```diff
@@ -1,4 +1,3 @@
-*.py diff=python
 *.lp linguist-language=Prolog
 lib/spack/external/* linguist-vendored
-*.bat text eol=crlf
+*.bat text eol=crlf
```
**`.github/workflows/bootstrap.yml`** (vendored, 2 changes)

```diff
@@ -26,7 +26,7 @@ jobs:
           dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
             make patch unzip which xz python3 python3-devel tree \
-            cmake bison bison-devel libstdc++-static
+            cmake bison bison-devel libstdc++-static gawk
       - name: Setup OpenSUSE
         if: ${{ matrix.image == 'opensuse/leap:latest' }}
         run: |
```
**`.github/workflows/ci.yaml`** (vendored, 22 changes)

```diff
@@ -42,17 +42,17 @@ jobs:
             # built-in repository or documentation
             filters: |
               bootstrap:
-                - 'var/spack/repos/builtin/packages/clingo-bootstrap/**'
-                - 'var/spack/repos/builtin/packages/clingo/**'
-                - 'var/spack/repos/builtin/packages/python/**'
-                - 'var/spack/repos/builtin/packages/re2c/**'
-                - 'var/spack/repos/builtin/packages/gnupg/**'
-                - 'var/spack/repos/builtin/packages/libassuan/**'
-                - 'var/spack/repos/builtin/packages/libgcrypt/**'
-                - 'var/spack/repos/builtin/packages/libgpg-error/**'
-                - 'var/spack/repos/builtin/packages/libksba/**'
-                - 'var/spack/repos/builtin/packages/npth/**'
-                - 'var/spack/repos/builtin/packages/pinentry/**'
+                - 'var/spack/repos/spack_repo/builtin/packages/clingo-bootstrap/**'
+                - 'var/spack/repos/spack_repo/builtin/packages/clingo/**'
+                - 'var/spack/repos/spack_repo/builtin/packages/python/**'
+                - 'var/spack/repos/spack_repo/builtin/packages/re2c/**'
+                - 'var/spack/repos/spack_repo/builtin/packages/gnupg/**'
+                - 'var/spack/repos/spack_repo/builtin/packages/libassuan/**'
+                - 'var/spack/repos/spack_repo/builtin/packages/libgcrypt/**'
+                - 'var/spack/repos/spack_repo/builtin/packages/libgpg-error/**'
+                - 'var/spack/repos/spack_repo/builtin/packages/libksba/**'
+                - 'var/spack/repos/spack_repo/builtin/packages/npth/**'
+                - 'var/spack/repos/spack_repo/builtin/packages/pinentry/**'
                 - 'lib/spack/**'
                 - 'share/spack/**'
                 - '.github/workflows/bootstrap.yml'
```
**`.github/workflows/prechecks.yml`** (vendored, 45 changes)

```diff
@@ -25,14 +25,16 @@ jobs:
         with:
           python-version: '3.13'
           cache: 'pip'
           cache-dependency-path: '.github/workflows/requirements/style/requirements.txt'
       - name: Install Python Packages
         run: |
           pip install --upgrade pip setuptools
           pip install -r .github/workflows/requirements/style/requirements.txt
       - name: vermin (Spack's Core)
-        run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
+        run: |
+          vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
       - name: vermin (Repositories)
-        run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
+        run: |
+          vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos var/spack/test_repos

   # Run style checks on the files that have been changed
   style:
@@ -40,23 +42,20 @@ jobs:
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
         with:
-          fetch-depth: 0
+          fetch-depth: 2
      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: '3.13'
          cache: 'pip'
          cache-dependency-path: '.github/workflows/requirements/style/requirements.txt'
      - name: Install Python packages
        run: |
          pip install --upgrade pip setuptools
          pip install -r .github/workflows/requirements/style/requirements.txt
-     - name: Setup git configuration
-       run: |
-         # Need this for the git tests to succeed.
-         git --version
-         . .github/workflows/bin/setup_git.sh
      - name: Run style tests
        run: |
-         share/spack/qa/run-style-tests
+         bin/spack style --base HEAD^1
+         bin/spack license verify
+         pylint -j $(nproc) --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib

   audit:
     uses: ./.github/workflows/audit.yaml
@@ -66,7 +65,11 @@ jobs:
       python_version: '3.13'

   verify-checksums:
-    if: ${{ inputs.with_packages == 'true' }}
+    # do not run if the commit message or PR description contains [skip-verify-checksums]
+    if: >-
+      ${{ inputs.with_packages == 'true' &&
+          !contains(github.event.pull_request.body, '[skip-verify-checksums]') &&
+          !contains(github.event.head_commit.message, '[skip-verify-checksums]') }}
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
@@ -103,21 +106,3 @@ jobs:
           spack -d bootstrap now --dev
           spack -d style -t black
           spack unit-test -V
-
-  # Further style checks from pylint
-  pylint:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-        with:
-          fetch-depth: 0
-      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
-        with:
-          python-version: '3.13'
-          cache: 'pip'
-      - name: Install Python packages
-        run: |
-          pip install --upgrade pip setuptools pylint
-      - name: Pylint (Spack Core)
-        run: |
-          pylint -j 4 --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib
```
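The new `if:` condition gives contributors an escape hatch: putting the literal tag `[skip-verify-checksums]` in the commit message or PR description skips the checksum-verification job. A hypothetical invocation (the tag string comes from the workflow above; the commit itself is illustrative):

```bash
# Hypothetical docs-only change where checksum verification adds no value.
git commit -m "docs: fix typos in packaging guide [skip-verify-checksums]"
git push origin my-docs-branch   # the verify-checksums job will be skipped
```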
```diff
@@ -1,7 +1,8 @@
 black==25.1.0
-clingo==5.7.1
+clingo==5.8.0
 flake8==7.2.0
 isort==6.0.1
 mypy==1.15.0
-types-six==1.17.0.20250304
+types-six==1.17.0.20250403
 vermin==1.6.0
+pylint==3.3.7
```
**`README.md`** (34 changes)

````diff
@@ -46,18 +46,42 @@ See the
 [Feature Overview](https://spack.readthedocs.io/en/latest/features.html)
 for examples and highlights.

-To install spack and your first package, make sure you have Python & Git.
+Installation
+----------------
+
+To install spack, first make sure you have Python & Git.
 Then:

-    $ git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
-    $ cd spack/bin
-    $ ./spack install zlib
+```bash
+git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
+```
+
+<details>
+<summary>What are <code>manyFiles=true</code> and <code>--depth=2</code>?</summary>
+<br>
+
+> [!TIP]
+> `-c feature.manyFiles=true` improves git's performance on repositories with 1,000+ files.
+>
+> `--depth=2` prunes the git history to reduce the size of the Spack installation.
+
+</details>
+
+```bash
+# For bash/zsh/sh
+. spack/share/spack/setup-env.sh
+
+# For tcsh/csh
+source spack/share/spack/setup-env.csh
+
+# For fish
+. spack/share/spack/setup-env.fish
+```
+
+```bash
+# Now you're ready to install a package!
+spack install zlib-ng
+```

 Documentation
 ----------------
````
```diff
@@ -90,10 +90,9 @@ config:
   misc_cache: $user_cache_path/cache


-  # Timeout in seconds used for downloading sources etc. This only applies
-  # to the connection phase and can be increased for slow connections or
-  # servers. 0 means no timeout.
-  connect_timeout: 10
+  # Abort downloads after this many seconds if no data is received.
+  # Setting this to 0 will disable the timeout.
+  connect_timeout: 30


   # If this is false, tools like curl that use SSL will not verify
@@ -179,6 +178,10 @@ config:
   package_lock_timeout: null


+  # pager(s) to use for commands with potentially long output (e.g., spack info)
+  pager:
+  - less -FXRS
+
   # Control how shared libraries are located at runtime on Linux. See the
   # the Spack documentation for details.
   shared_linking:
```
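The default pager flags are worth decoding: with `less -FXRS`, `-F` quits immediately when output fits on one screen, `-X` keeps that output on the terminal after exit, `-R` passes ANSI colors through, and `-S` chops long lines instead of wrapping. Since `pager` appears to be an ordinary `config:` key, a user scope should be able to override it; a minimal sketch, assuming the usual config-scope precedence applies:

```yaml
# ~/.spack/config.yaml -- hypothetical user override of the default pager
config:
  pager:
  - less -FXR   # same defaults, but wrap long lines instead of chopping them
```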
```diff
@@ -25,6 +25,8 @@ packages:
     glu: [apple-glu]
     unwind: [apple-libunwind]
     uuid: [apple-libuuid]
+  apple-clang:
+    buildable: false
   apple-gl:
     buildable: false
     externals:
```
```diff
@@ -72,6 +72,8 @@ packages:
     permissions:
       read: world
       write: user
+  cce:
+    buildable: false
   cray-fftw:
     buildable: false
   cray-libsci:
@@ -86,13 +88,23 @@ packages:
     buildable: false
   essl:
     buildable: false
+  fj:
+    buildable: false
   fujitsu-mpi:
     buildable: false
+  fujitsu-ssl2:
+    buildable: false
+  glibc:
+    buildable: false
+  hpcx-mpi:
+    buildable: false
   iconv:
     prefer: [libiconv]
   mpt:
     buildable: false
+  musl:
+    buildable: false
   spectrum-mpi:
     buildable: false
   xl:
     buildable: false
```
```diff
@@ -11,4 +11,4 @@
 # ~/.spack/repos.yaml
 # -------------------------------------------------------------------------
 repos:
-- $spack/var/spack/repos/builtin
+- $spack/var/spack/repos/spack_repo/builtin
```
```diff
@@ -23,3 +23,5 @@ packages:
   mpi:
     require:
     - one_of: [msmpi]
+  msvc:
+    buildable: false
```
```diff
@@ -1916,7 +1916,7 @@ diagnostics. Issues, if found, are reported to stdout:
    PKG-DIRECTIVES: 1 issue found
    1. lammps: wrong variant in "conflicts" directive
        the variant 'adios' does not exist
-       in /home/spack/spack/var/spack/repos/builtin/packages/lammps/package.py
+       in /home/spack/spack/var/spack/repos/spack_repo/builtin/packages/lammps/package.py


 ------------
```
```diff
@@ -83,7 +83,7 @@ packages. You can quickly find examples by running:

 .. code-block:: console

-   $ cd var/spack/repos/builtin/packages
+   $ cd var/spack/repos/spack_repo/builtin/packages
    $ grep -l QMakePackage */package.py

```
```diff
@@ -27,10 +27,10 @@ it could use the ``require`` directive as follows:

 Spack has a number of built-in bundle packages, such as:

-* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_
-* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_
-* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/libc/package.py>`_
-* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/xsdk/package.py>`_
+* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/amd_aocl/package.py>`_
+* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_proxy_apps/package.py>`_
+* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/libc/package.py>`_
+* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/xsdk/package.py>`_

 where ``Xsdk`` also inherits from ``CudaPackage`` and ``RocmPackage`` and
 ``Libc`` is a virtual bundle package for the C standard library.
```
```diff
@@ -199,7 +199,7 @@ a variant to control this:
 However, not every CMake package accepts all four of these options.
 Grep the ``CMakeLists.txt`` file to see if the default values are
 missing or replaced. For example, the
-`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/dealii/package.py>`_
+`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/dealii/package.py>`_
 package overrides the default variant with:

 .. code-block:: python
```
```diff
@@ -20,8 +20,8 @@ start is to look at the definitions of other build systems. This guide
 focuses mostly on how Spack's build systems work.

 In this guide, we will be using the
-`perl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/perl/package.py>`_ and
-`cmake <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cmake/package.py>`_
+`perl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/perl/package.py>`_ and
+`cmake <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cmake/package.py>`_
 packages as examples. ``perl``'s build system is a hand-written
 ``Configure`` shell script, while ``cmake`` bootstraps itself during
 installation. Both of these packages require custom build systems.
```
```diff
@@ -91,14 +91,14 @@ there are any other variables you need to set, you can do this in the

 .. code-block:: python

-   def setup_build_environment(self, env):
+   def setup_build_environment(self, env: EnvironmentModifications) -> None:
        env.set("PREFIX", prefix)
        env.set("BLASLIB", spec["blas"].libs.ld_flags)


-`cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cbench/package.py>`_
+`cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cbench/package.py>`_
 is a good example of a simple package that does this, while
-`esmf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/esmf/package.py>`_
+`esmf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/esmf/package.py>`_
 is a good example of a more complex package.

 """"""""""""""""""""""
```
```diff
@@ -129,7 +129,7 @@ If you do need access to the spec, you can create a property like so:
        ]


-`cloverleaf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cloverleaf/package.py>`_
+`cloverleaf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cloverleaf/package.py>`_
 is a good example of a package that uses this strategy.

 """""""""""""
```
```diff
@@ -152,7 +152,7 @@ and a ``filter`` method to help with this. For example:
        makefile.filter(r"^\s*FC\s*=.*", f"FC = {spack_fc}")


-`stream <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/stream/package.py>`_
+`stream <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/stream/package.py>`_
 is a good example of a package that involves editing a Makefile to set
 the appropriate variables.
```
```diff
@@ -192,7 +192,7 @@ well for storing variables:
            inc.write(f"{key} = {config[key]}\n")


-`elk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/elk/package.py>`_
+`elk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/elk/package.py>`_
 is a good example of a package that uses a dictionary to store
 configuration variables.
```
```diff
@@ -213,7 +213,7 @@ them in a list:
            inc.write(f"{var}\n")


-`hpl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/hpl/package.py>`_
+`hpl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/hpl/package.py>`_
 is a good example of a package that uses a list to store
 configuration variables.
```
```diff
@@ -39,7 +39,7 @@ for "CRAN <package-name>" and you should quickly find what you want.
 If it isn't on CRAN, try Bioconductor, another common R repository.

 For the purposes of this tutorial, we will be walking through
-`r-caret <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-caret/package.py>`_
+`r-caret <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_caret/package.py>`_
 as an example. If you search for "CRAN caret", you will quickly find what
 you are looking for at https://cran.r-project.org/package=caret.
 https://cran.r-project.org is the main CRAN website. However, CRAN also
@@ -337,7 +337,7 @@ Non-R dependencies
 ^^^^^^^^^^^^^^^^^^

 Some packages depend on non-R libraries for linking. Check out the
-`r-stringi <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-stringi/package.py>`_
+`r-stringi <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_stringi/package.py>`_
 package for an example: https://cloud.r-project.org/package=stringi.
 If you search for the text "SystemRequirements", you will see:

@@ -352,7 +352,7 @@ Passing arguments to the installation

 Some R packages provide additional flags that can be passed to
 ``R CMD INSTALL``, often to locate non-R dependencies.
-`r-rmpi <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-rmpi/package.py>`_
+`r-rmpi <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_rmpi/package.py>`_
 is an example of this, and flags for linking to an MPI library. To pass
 these to the installation command, you can override ``configure_args``
 like so:
```
```diff
@@ -104,10 +104,10 @@ Finding available options

 The first place to start when looking for a list of valid options to
 build a package is ``scons --help``. Some packages like
-`kahip <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/kahip/package.py>`_
+`kahip <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/kahip/package.py>`_
 don't bother overwriting the default SCons help message, so this isn't
 very useful, but other packages like
-`serf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/serf/package.py>`_
+`serf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/serf/package.py>`_
 print a list of valid command-line variables:

 .. code-block:: console
@@ -177,7 +177,7 @@ print a list of valid command-line variables:


 More advanced packages like
-`cantera <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cantera/package.py>`_
+`cantera <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cantera/package.py>`_
 use ``scons --help`` to print a list of subcommands:

 .. code-block:: console
```
```diff
@@ -225,8 +225,14 @@ def setup(sphinx):
     ("py:class", "llnl.util.lang.T"),
     ("py:class", "llnl.util.lang.KT"),
     ("py:class", "llnl.util.lang.VT"),
+    ("py:class", "llnl.util.lang.K"),
+    ("py:class", "llnl.util.lang.V"),
+    ("py:class", "llnl.util.lang.ClassPropertyType"),
     ("py:obj", "llnl.util.lang.KT"),
     ("py:obj", "llnl.util.lang.VT"),
+    ("py:obj", "llnl.util.lang.ClassPropertyType"),
+    ("py:obj", "llnl.util.lang.K"),
+    ("py:obj", "llnl.util.lang.V"),
 ]

 # The reST default role (used for this markup: `text`) to use for all documents.
```
```diff
@@ -46,6 +46,12 @@ Each Spack configuration file is nested under a top-level section
 corresponding to its name. So, ``config.yaml`` starts with ``config:``,
 ``mirrors.yaml`` starts with ``mirrors:``, etc.

+.. tip::
+
+   Validation and autocompletion of Spack config files can be enabled in
+   your editor with the YAML language server. See `spack/schemas
+   <https://github.com/spack/schemas>`_ for more information.
+
 .. _configuration-scopes:

 --------------------
```
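For editors that speak the Language Server Protocol, yaml-language-server also supports a per-file schema modeline, so validation can be turned on without any global editor configuration. The comment syntax below is the language server's standard convention; the schema URL is a placeholder, since the real ones live in the spack/schemas repository mentioned in the tip:

```yaml
# yaml-language-server: $schema=<URL of the matching schema from spack/schemas>
config:
  build_jobs: 8
```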
```diff
@@ -226,9 +226,9 @@ If all is well, you'll see something like this:

   Modified files:

-     var/spack/repos/builtin/packages/hdf5/package.py
-     var/spack/repos/builtin/packages/hdf/package.py
-     var/spack/repos/builtin/packages/netcdf/package.py
+     var/spack/repos/spack_repo/builtin/packages/hdf5/package.py
+     var/spack/repos/spack_repo/builtin/packages/hdf/package.py
+     var/spack/repos/spack_repo/builtin/packages/netcdf/package.py
   =======================================================
   Flake8 checks were clean.

@@ -236,9 +236,9 @@ However, if you aren't compliant with PEP 8, flake8 will complain:

 .. code-block:: console

-   var/spack/repos/builtin/packages/netcdf/package.py:26: [F401] 'os' imported but unused
-   var/spack/repos/builtin/packages/netcdf/package.py:61: [E303] too many blank lines (2)
-   var/spack/repos/builtin/packages/netcdf/package.py:106: [E501] line too long (92 > 79 characters)
+   var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:26: [F401] 'os' imported but unused
+   var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:61: [E303] too many blank lines (2)
+   var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:106: [E501] line too long (92 > 79 characters)
   Flake8 found errors.

 Most of the error messages are straightforward, but if you don't understand what
@@ -280,7 +280,7 @@ All of these can be installed with Spack, e.g.

 .. warning::

-   Sphinx has `several required dependencies <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-sphinx/package.py>`_.
+   Sphinx has `several required dependencies <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/py-sphinx/package.py>`_.
    If you're using a ``python`` from Spack and you installed
    ``py-sphinx`` and friends, you need to make them available to your
    ``python``. The easiest way to do this is to run:
```
```diff
@@ -154,9 +154,7 @@ Package-related modules

 :mod:`spack.util.naming`
   Contains functions for mapping between Spack package names,
-  Python module names, and Python class names. Functions like
-  :func:`~spack.util.naming.mod_to_class` handle mapping package
-  module names to class names.
+  Python module names, and Python class names.

 :mod:`spack.directives`
   *Directives* are functions that can be called inside a package definition
```
**`lib/spack/docs/env_vars_yaml.rst`** (new file, 34 lines)

```diff
@@ -0,0 +1,34 @@
+.. Copyright Spack Project Developers. See COPYRIGHT file for details.
+
+   SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+.. _env-vars-yaml:
+
+=============================================
+Environment Variable Settings (env_vars.yaml)
+=============================================
+
+Spack allows you to include shell environment variable modifications
+for a spack environment by including an ``env_vars.yaml``. Environment
+variables can be modified by setting, unsetting, appending, and prepending
+variables in the shell environment.
+The changes to the shell environment will take effect when the spack
+environment is activated.
+
+For example,
+
+.. code-block:: yaml
+
+   env_vars:
+     set:
+       ENVAR_TO_SET_IN_ENV_LOAD: "FOO"
+     unset:
+       ENVAR_TO_UNSET_IN_ENV_LOAD:
+     prepend_path:
+       PATH_LIST: "path/to/prepend"
+     append_path:
+       PATH_LIST: "path/to/append"
+     remove_path:
+       PATH_LIST: "path/to/remove"
```
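The effect is easiest to see at activation time. A hypothetical session, reusing the variable names from the example above (and assuming the modifications are reverted on deactivation):

```console
$ spack env activate myenv
$ echo $ENVAR_TO_SET_IN_ENV_LOAD
FOO
$ spack env deactivate
$ echo $ENVAR_TO_SET_IN_ENV_LOAD

```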
```diff
@@ -1000,6 +1000,28 @@ For example, the following environment has three root packages:
 This allows for a much-needed reduction in redundancy between packages
 and constraints.

+-------------------------------
+Modifying Environment Variables
+-------------------------------
+
+Spack Environments can modify the active shell's environment variables when
+activated. The environment can be configured to set, unset, prepend, or append
+using the ``env_vars`` configuration in ``spack.yaml`` or through config scope files:
+
+.. code-block:: yaml
+
+   spack:
+     env_vars:
+       set:
+         ENVAR_TO_SET_IN_ENV_LOAD: "FOO"
+       unset:
+         ENVAR_TO_UNSET_IN_ENV_LOAD:
+       prepend_path:
+         PATH_LIST: "path/to/prepend"
+       append_path:
+         PATH_LIST: "path/to/append"
+       remove_path:
+         PATH_LIST: "path/to/remove"
+
 -----------------
 Environment Views
```
```diff
@@ -131,7 +131,7 @@ creates a simple python file:
 It doesn't take much python coding to get from there to a working
 package:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/libelf/package.py
    :lines: 5-

 Spack also provides wrapper functions around common commands like
```
```diff
@@ -75,6 +75,7 @@ or refer to the full manual below.
    packages_yaml
    build_settings
    environments
+   env_vars_yaml
    containers
    mirrors
    module_file_support
```
```diff
@@ -128,7 +128,7 @@ depend on the spec:

 .. code-block:: python

-   def setup_run_environment(self, env):
+   def setup_run_environment(self, env: EnvironmentModifications) -> None:
        if self.spec.satisfies("+foo"):
            env.set("FOO", "bar")

@@ -142,7 +142,7 @@ For example, a simplified version of the ``python`` package could look like this:

 .. code-block:: python

-   def setup_dependent_run_environment(self, env, dependent_spec):
+   def setup_dependent_run_environment(self, env: EnvironmentModifications, dependent_spec: Spec) -> None:
        if dependent_spec.package.extends(self.spec):
            env.prepend_path("PYTHONPATH", dependent_spec.prefix.lib.python)
```
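In context, the annotated hooks read like the sketch below — a minimal, hypothetical package, assuming the usual `from spack.package import *` preamble re-exports `EnvironmentModifications` and `Spec` as the builtin packages' type hints suggest:

```python
from spack.package import *  # assumed to provide Package, EnvironmentModifications, Spec


class Example(Package):
    """Hypothetical package showing the newly annotated environment hooks."""

    homepage = "https://example.com"
    url = "https://example.com/example-1.0.tar.gz"

    variant("foo", default=False, description="enable FOO support")

    def setup_run_environment(self, env: EnvironmentModifications) -> None:
        # Applied when a user loads this package.
        if self.spec.satisfies("+foo"):
            env.set("FOO", "bar")

    def setup_dependent_run_environment(
        self, env: EnvironmentModifications, dependent_spec: Spec
    ) -> None:
        # Applied once per dependent package; EXAMPLE_PATH is illustrative.
        env.prepend_path("EXAMPLE_PATH", dependent_spec.prefix)
```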
```diff
@@ -369,9 +369,9 @@ If you have a collection of software expected to work well together with
 no source code of its own, you can create a :ref:`BundlePackage <bundlepackage>`.
 Examples where bundle packages can be useful include defining suites of
 applications (e.g, `EcpProxyApps
-<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_), commonly used libraries
-(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_),
-and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py>`_).
+<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_proxy_apps/package.py>`_), commonly used libraries
+(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/amd_aocl/package.py>`_),
+and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_data_vis_sdk/package.py>`_).

 These versioned packages primarily consist of dependencies on the associated
 software packages. They can include :ref:`variants <variants>` to ensure
```
```diff
@@ -443,7 +443,7 @@ lives in:
 .. code-block:: console

    $ spack location -p gmp
-   ${SPACK_ROOT}/var/spack/repos/builtin/packages/gmp/package.py
+   ${SPACK_ROOT}/var/spack/repos/spack_repo/builtin/packages/gmp/package.py

 but ``spack edit`` provides a much simpler shortcut and saves you the
 trouble of typing the full path.
```
```diff
@@ -457,19 +457,19 @@ live in Spack's directory structure. In general, :ref:`cmd-spack-create`
 handles creating package files for you, so you can skip most of the
 details here.

-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-``var/spack/repos/builtin/packages``
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+``var/spack/repos/spack_repo/builtin/packages``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

 A Spack installation directory is structured like a standard UNIX
 install prefix (``bin``, ``lib``, ``include``, ``var``, ``opt``,
 etc.). Most of the code for Spack lives in ``$SPACK_ROOT/lib/spack``.
-Packages themselves live in ``$SPACK_ROOT/var/spack/repos/builtin/packages``.
+Packages themselves live in ``$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages``.

 If you ``cd`` to that directory, you will see directories for each
 package:

-.. command-output:: cd $SPACK_ROOT/var/spack/repos/builtin/packages && ls
+.. command-output:: cd $SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages && ls
    :shell:
    :ellipsis: 10
```
```diff
@@ -479,7 +479,7 @@ package lives in:

 .. code-block:: none

-   $SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py
+   $SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/libelf/package.py

 Alongside the ``package.py`` file, a package may contain extra
 directories or files (like patches) that it needs to build.
@@ -492,7 +492,7 @@ Packages are named after the directory containing ``package.py``. So,
 ``libelf``'s ``package.py`` lives in a directory called ``libelf``.
 The ``package.py`` file defines a class called ``Libelf``, which
 extends Spack's ``Package`` class. For example, here is
-``$SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py``:
+``$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/libelf/package.py``:

 .. code-block:: python
    :linenos:
@@ -520,7 +520,7 @@ these:
    $ spack install libelf@0.8.13

 Spack sees the package name in the spec and looks for
-``libelf/package.py`` in ``var/spack/repos/builtin/packages``.
+``libelf/package.py`` in ``var/spack/repos/spack_repo/builtin/packages``.
 Likewise, if you run ``spack install py-numpy``, Spack looks for
 ``py-numpy/package.py``.
```
```diff
@@ -686,7 +686,7 @@ https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.1.1.tar.bz2
 In order to handle this, you can define a ``url_for_version()`` function
 like so:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/openmpi/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py
    :pyobject: Openmpi.url_for_version

 With the use of this ``url_for_version()``, Spack knows to download OpenMPI ``2.1.1``
@@ -787,7 +787,7 @@ of GNU. For that, Spack goes a step further and defines a mixin class that
 takes care of all of the plumbing and requires packagers to just define a proper
 ``gnu_mirror_path`` attribute:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/autoconf/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/autoconf/package.py
    :lines: 9-18

 ^^^^^^^^^^^^^^^^^^^^^^^^
```
```diff
@@ -1995,7 +1995,7 @@ structure like this:

 .. code-block:: none

-   $SPACK_ROOT/var/spack/repos/builtin/packages/
+   $SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/
        mvapich2/
            package.py
            ad_lustre_rwcontig_open_source.patch
@@ -2133,7 +2133,7 @@ handles ``RPATH``:

 .. _pyside-patch:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/py-pyside/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/py_pyside/package.py
    :pyobject: PyPyside.patch
    :linenos:
```
```diff
@@ -2201,7 +2201,7 @@ using the ``spack resource show`` command::

    $ spack resource show 3877ab54
    3877ab548f88597ab2327a2230ee048d2d07ace1062efe81fc92e91b7f39cd00
-      path: /home/spackuser/src/spack/var/spack/repos/builtin/packages/m4/gnulib-pgi.patch
+      path: /home/spackuser/src/spack/var/spack/repos/spack_repo/builtin/packages/m4/gnulib-pgi.patch
       applies to: builtin.m4

 ``spack resource show`` looks up downloadable resources from package
@@ -2219,7 +2219,7 @@ wonder where the extra boost patches are coming from::
    ^boost@1.68.0%apple-clang@9.0.0+atomic+chrono~clanglibcpp cxxstd=default +date_time~debug+exception+filesystem+graph~icu+iostreams+locale+log+math~mpi+multithreaded~numpy patches=2ab6c72d03dec6a4ae20220a9dfd5c8c572c5294252155b85c6874d97c323199,b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f ~pic+program_options~python+random+regex+serialization+shared+signals~singlethreaded+system~taggedlayout+test+thread+timer~versionedlayout+wave arch=darwin-highsierra-x86_64
    $ spack resource show b37164268
    b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f
-      path: /home/spackuser/src/spack/var/spack/repos/builtin/packages/dealii/boost_1.68.0.patch
+      path: /home/spackuser/src/spack/var/spack/repos/spack_repo/builtin/packages/dealii/boost_1.68.0.patch
       applies to: builtin.boost
       patched by: builtin.dealii
```
```diff
@@ -2930,7 +2930,7 @@ this, Spack provides four different methods that can be overridden in a package:

 The Qt package, for instance, uses this call:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/qt/package.py
    :pyobject: Qt.setup_dependent_build_environment
    :linenos:

@@ -2958,7 +2958,7 @@ variables to be used by the dependent. This is done by implementing
 :meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`. An
 example of this can be found in the ``Python`` package:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/python/package.py
    :pyobject: Python.setup_dependent_package
    :linenos:
```
```diff
@@ -3785,7 +3785,7 @@ It is usually sufficient for a packager to override a few
 build system specific helper methods or attributes to provide, for instance,
 configure arguments:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/m4/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/m4/package.py
    :pyobject: M4.configure_args
    :linenos:

@@ -4110,7 +4110,7 @@ Shell command functions

 Recall the install method from ``libelf``:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/libelf/package.py
    :pyobject: Libelf.install
    :linenos:
```
```diff
@@ -4901,7 +4901,7 @@ the one passed to install, only the MPI implementations all set some
 additional properties on it to help you out. E.g., in openmpi, you'll
 find this:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/openmpi/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py
    :pyobject: Openmpi.setup_dependent_package

 That code allows the ``openmpi`` package to associate an ``mpicc`` property
```
```diff
@@ -6001,16 +6001,16 @@ with those implemented in the package itself.
    * - Parent/Provider Package
      - Stand-alone Tests
    * - `C
-       <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/c>`_
+       <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/c>`_
      - Compiles ``hello.c`` and runs it
    * - `Cxx
-       <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cxx>`_
+       <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cxx>`_
      - Compiles and runs several ``hello`` programs
    * - `Fortran
-       <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/fortran>`_
+       <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/fortran>`_
      - Compiles and runs ``hello`` programs (``F`` and ``f90``)
    * - `Mpi
-       <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/mpi>`_
+       <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/mpi>`_
      - Compiles and runs ``mpi_hello`` (``c``, ``fortran``)
    * - :ref:`PythonPackage <pythonpackage>`
      - Imports modules listed in the ``self.import_modules`` property with defaults derived from the tarball
@@ -6031,7 +6031,7 @@ maintainers provide additional stand-alone tests customized to the package.
 One example of a package that adds its own stand-alone tests to those
 "inherited" by the virtual package it provides an implementation for is
 the `Openmpi package
-<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/openmpi/package.py>`_.
+<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py>`_.

 Below are snippets from running and viewing the stand-alone test results
 for ``openmpi``:
```
@@ -9,7 +9,7 @@ Package Repositories (repos.yaml)
|
||||
=================================
|
||||
|
||||
Spack comes with thousands of built-in package recipes in
|
||||
``var/spack/repos/builtin/``. This is a **package repository** -- a
|
||||
``var/spack/repos/spack_repo/builtin/``. This is a **package repository** -- a
|
||||
directory that Spack searches when it needs to find a package by name.
|
||||
You may need to maintain packages for restricted, proprietary or
|
||||
experimental software separately from the built-in repository. Spack
|
||||
@@ -69,7 +69,7 @@ The default ``etc/spack/defaults/repos.yaml`` file looks like this:
|
||||
.. code-block:: yaml
|
||||
|
||||
repos:
|
||||
- $spack/var/spack/repos/builtin
|
||||
- $spack/var/spack/repos/spack_repo/builtin
|
||||
|
||||
The file starts with ``repos:`` and contains a single ordered list of
|
||||
paths to repositories. Each path is on a separate line starting with
|
||||
@@ -78,16 +78,16 @@ paths to repositories. Each path is on a separate line starting with
|
||||
.. code-block:: yaml
|
||||
|
||||
repos:
|
||||
- /opt/local-repo
|
||||
- $spack/var/spack/repos/builtin
|
||||
- /opt/repos/spack_repo/local_repo
|
||||
- $spack/var/spack/repos/spack_repo/builtin
|
||||
|
||||
When Spack interprets a spec, e.g., ``mpich`` in ``spack install mpich``,
|
||||
it searches these repositories in order (first to last) to resolve each
|
||||
package name. In this example, Spack will look for the following
|
||||
packages and use the first valid file:
|
||||
|
||||
1. ``/opt/local-repo/packages/mpich/package.py``
|
||||
2. ``$spack/var/spack/repos/builtin/packages/mpich/package.py``
|
||||
1. ``/opt/repos/spack_repo/local_repo/packages/mpich/package.py``
|
||||
2. ``$spack/var/spack/repos/spack_repo/builtin/packages/mpich/package.py``
|
||||
|
||||
.. note::
|
||||
|
||||
@@ -101,14 +101,15 @@ Namespaces
|
||||
|
||||
Every repository in Spack has an associated **namespace** defined in its
|
||||
top-level ``repo.yaml`` file. If you look at
|
||||
``var/spack/repos/builtin/repo.yaml`` in the built-in repository, you'll
|
||||
``var/spack/repos/spack_repo/builtin/repo.yaml`` in the built-in repository, you'll
|
||||
see that its namespace is ``builtin``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ cat var/spack/repos/builtin/repo.yaml
|
||||
$ cat var/spack/repos/spack_repo/builtin/repo.yaml
|
||||
repo:
|
||||
namespace: builtin
|
||||
api: v2.0
|
||||
|
||||
Spack records the repository namespace of each installed package. For
|
||||
example, if you install the ``mpich`` package from the ``builtin`` repo,
|
||||
@@ -217,15 +218,15 @@ Suppose you have three repositories: the builtin Spack repo
|
||||
repo containing your own prototype packages (``proto``). Suppose they
|
||||
contain packages as follows:
|
||||
|
||||
+--------------+------------------------------------+-----------------------------+
|
||||
| Namespace | Path to repo | Packages |
|
||||
+==============+====================================+=============================+
|
||||
| ``proto`` | ``~/proto`` | ``mpich`` |
|
||||
+--------------+------------------------------------+-----------------------------+
|
||||
| ``llnl`` | ``/usr/local/llnl`` | ``hdf5`` |
|
||||
+--------------+------------------------------------+-----------------------------+
|
||||
| ``builtin`` | ``$spack/var/spack/repos/builtin`` | ``mpich``, ``hdf5``, others |
|
||||
+--------------+------------------------------------+-----------------------------+
|
||||
+--------------+-----------------------------------------------+-----------------------------+
|
||||
| Namespace | Path to repo | Packages |
|
||||
+==============+===============================================+=============================+
|
||||
| ``proto`` | ``~/my_spack_repos/spack_repo/proto`` | ``mpich`` |
|
||||
+--------------+-----------------------------------------------+-----------------------------+
|
||||
| ``llnl`` | ``/usr/local/repos/spack_repo/llnl`` | ``hdf5`` |
|
||||
+--------------+-----------------------------------------------+-----------------------------+
|
||||
| ``builtin`` | ``$spack/var/spack/repos/spack_repo/builtin`` | ``mpich``, ``hdf5``, others |
|
||||
+--------------+-----------------------------------------------+-----------------------------+
|
||||
|
||||
Suppose that ``hdf5`` depends on ``mpich``. You can override the
|
||||
built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``:
|
||||
@@ -233,8 +234,8 @@ built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``:
|
||||
.. code-block:: yaml
|
||||
|
||||
repos:
|
||||
- /usr/local/llnl
|
||||
- $spack/var/spack/repos/builtin
|
||||
- /usr/local/repos/spack_repo/llnl
|
||||
- $spack/var/spack/repos/spack_repo/builtin
|
||||
|
||||
``spack install hdf5`` will install ``llnl.hdf5 ^builtin.mpich``.
|
||||
|
||||
@@ -243,9 +244,9 @@ If, instead, ``repos.yaml`` looks like this:
|
||||
.. code-block:: yaml
|
||||
|
||||
repos:
|
||||
- ~/proto
|
||||
- /usr/local/llnl
|
||||
- $spack/var/spack/repos/builtin
|
||||
- ~/my_spack_repos/spack_repo/proto
|
||||
- /usr/local/repos/spack_repo/llnl
|
||||
- $spack/var/spack/repos/spack_repo/builtin
|
||||
|
||||
``spack install hdf5`` will install ``llnl.hdf5 ^proto.mpich``.
|
||||
|
||||
@@ -326,8 +327,8 @@ files, use ``spack repo list``.
|
||||
|
||||
$ spack repo list
|
||||
==> 2 package repositories.
|
||||
myrepo ~/myrepo
|
||||
builtin ~/spack/var/spack/repos/builtin
|
||||
myrepo v2.0 ~/my_spack_repos/spack_repo/myrepo
|
||||
builtin v2.0 ~/spack/var/spack/repos/spack_repo/builtin
|
||||
|
||||
Each repository is listed with its associated namespace. To get the raw,
|
||||
merged YAML from all configuration files, use ``spack config get repos``:
|
||||
@@ -335,9 +336,9 @@ merged YAML from all configuration files, use ``spack config get repos``:
|
||||
.. code-block:: console
|
||||
|
||||
$ spack config get repos
|
||||
repos:srepos:
|
||||
- ~/myrepo
|
||||
- $spack/var/spack/repos/builtin
|
||||
repos:
|
||||
- ~/my_spack_repos/spack_repo/myrepo
|
||||
- $spack/var/spack/repos/spack_repo/builtin
|
||||
|
||||
Note that, unlike ``spack repo list``, this does not include the
|
||||
namespace, which is read from each repo's ``repo.yaml``.
|
||||
@@ -351,66 +352,54 @@ yourself; you can use the ``spack repo create`` command.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack repo create myrepo
|
||||
$ spack repo create ~/my_spack_repos myrepo
|
||||
==> Created repo with namespace 'myrepo'.
|
||||
==> To register it with spack, run this command:
|
||||
spack repo add ~/myrepo
|
||||
spack repo add ~/my_spack_repos/spack_repo/myrepo
|
||||
|
||||
$ ls myrepo
|
||||
$ ls ~/my_spack_repos/spack_repo/myrepo
|
||||
packages/ repo.yaml
|
||||
|
||||
$ cat myrepo/repo.yaml
|
||||
$ cat ~/my_spack_repos/spack_repo/myrepo/repo.yaml
|
||||
repo:
|
||||
namespace: 'myrepo'
|
||||
api: v2.0
|
||||
|
||||
By default, the namespace of a new repo matches its directory's name.
|
||||
You can supply a custom namespace with a second argument, e.g.:
|
||||
Namespaces can also be nested, which can be useful if you have
|
||||
multiple package repositories for an organization. Spack will
|
||||
create the corresponding directory structure for you:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack repo create myrepo llnl.comp
|
||||
$ spack repo create ~/my_spack_repos llnl.comp
|
||||
==> Created repo with namespace 'llnl.comp'.
|
||||
==> To register it with spack, run this command:
|
||||
spack repo add ~/myrepo
|
||||
spack repo add ~/my_spack_repos/spack_repo/llnl/comp
|
||||
|
||||
$ cat myrepo/repo.yaml
|
||||
|
||||
$ cat ~/my_spack_repos/spack_repo/llnl/comp/repo.yaml
|
||||
repo:
|
||||
namespace: 'llnl.comp'
|
||||
|
||||
You can also create repositories with custom structure with the ``-d/--subdirectory``
|
||||
argument, e.g.:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack repo create -d applications myrepo apps
|
||||
==> Created repo with namespace 'apps'.
|
||||
==> To register it with Spack, run this command:
|
||||
spack repo add ~/myrepo
|
||||
|
||||
$ ls myrepo
|
||||
applications/ repo.yaml
|
||||
|
||||
$ cat myrepo/repo.yaml
|
||||
repo:
|
||||
namespace: apps
|
||||
subdirectory: applications
|
||||
api: v2.0
|
||||
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
``spack repo add``
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Once your repository is created, you can register it with Spack with
|
||||
``spack repo add``:
|
||||
``spack repo add``. You nee to specify the path to the directory that
|
||||
contains the ``repo.yaml`` file.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack repo add ./myrepo
|
||||
$ spack repo add ~/my_spack_repos/spack_repo/llnl/comp
|
||||
==> Added repo with namespace 'llnl.comp'.
|
||||
|
||||
$ spack repo list
|
||||
==> 2 package repositories.
|
||||
llnl.comp ~/myrepo
|
||||
builtin ~/spack/var/spack/repos/builtin
|
||||
llnl.comp v2.0 ~/my_spack_repos/spack_repo/llnl/comp
|
||||
builtin v2.0 ~/spack/var/spack/repos/spack_repo/builtin
|
||||
|
||||
|
||||
This simply adds the repo to your ``repos.yaml`` file.
|
||||
|
||||
@@ -432,46 +421,43 @@ By namespace:

 .. code-block:: console

    $ spack repo rm llnl.comp
-   ==> Removed repository ~/myrepo with namespace 'llnl.comp'.
+   ==> Removed repository ~/my_spack_repos/spack_repo/llnl/comp with namespace 'llnl.comp'.

    $ spack repo list
    ==> 1 package repository.
-   builtin    ~/spack/var/spack/repos/builtin
+   builtin    ~/spack/var/spack/repos/spack_repo/builtin

 By path:

 .. code-block:: console

-   $ spack repo rm ~/myrepo
-   ==> Removed repository ~/myrepo
+   $ spack repo rm ~/my_spack_repos/spack_repo/llnl/comp
+   ==> Removed repository ~/my_spack_repos/spack_repo/llnl/comp

    $ spack repo list
    ==> 1 package repository.
-   builtin    ~/spack/var/spack/repos/builtin
+   builtin    ~/spack/var/spack/repos/spack_repo/builtin

 --------------------------------
 Repo namespaces and Python
 --------------------------------

-You may have noticed that namespace notation for repositories is similar
-to the notation for namespaces in Python. As it turns out, you *can*
-treat Spack repositories like Python packages; this is how they are
-implemented.
+Package repositories are implemented as Python packages. To be precise,
+they are `namespace packages
+<https://packaging.python.org/en/latest/guides/packaging-namespace-packages/>`_
+with ``spack_repo`` the top-level namespace, followed by the repository
+namespace as submodules. For example, the builtin repository corresponds
+to the Python module ``spack_repo.builtin.packages``.

-You could, for example, extend a ``builtin`` package in your own
+This structure allows you to extend a ``builtin`` package in your own
 repository:

 .. code-block:: python

-   from spack.pkg.builtin.mpich import Mpich
+   from spack_repo.builtin.packages.mpich.package import Mpich

    class MyPackage(Mpich):
        ...

-Spack repo namespaces are actually Python namespaces tacked on under
-``spack.pkg``. The search semantics of ``repos.yaml`` are actually
-implemented using Python's built-in `sys.path
-<https://docs.python.org/2/library/sys.html#sys.path>`_ search. The
-:py:mod:`spack.repo` module implements a custom `Python importer
-<https://docs.python.org/2/library/imp.html>`_.
+Spack populates ``sys.path`` at runtime with the path to the root of your
+package repository's ``spack_repo`` directory.
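Because registration boils down to a ``sys.path`` entry, the import mechanics are easy to reproduce by hand. A minimal sketch, with an illustrative path and a hypothetical ``mpich`` package in the ``llnl.comp`` repo:

.. code-block:: python

   import sys

   # The directory *containing* ``spack_repo`` goes on sys.path; Spack does
   # this automatically for every registered repository.
   sys.path.insert(0, "/home/me/my_spack_repos")

   # After that, repo packages are importable as ordinary modules.
   from spack_repo.llnl.comp.packages.mpich.package import Mpich  # hypothetical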
@@ -5,9 +5,9 @@ sphinx-rtd-theme==3.0.2
 python-levenshtein==0.27.1
 docutils==0.21.2
 pygments==2.19.1
-urllib3==2.3.0
+urllib3==2.4.0
 pytest==8.3.5
 isort==6.0.1
 black==25.1.0
-flake8==7.1.2
+flake8==7.2.0
 mypy==1.11.1
lib/spack/external/__init__.py (vendored)
@@ -11,6 +11,7 @@
 * Homepage: https://altgraph.readthedocs.io/en/latest/index.html
 * Usage: dependency of macholib
 * Version: 0.17.3
+* License: MIT

 archspec
 --------
@@ -18,6 +19,7 @@
 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
 * Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47)
+* License: Apache-2.0 or MIT

 astunparse
 ----------------
@@ -25,6 +27,7 @@
 * Homepage: https://github.com/simonpercivall/astunparse
 * Usage: Unparsing Python ASTs for package hashes in Spack
 * Version: 1.6.3 (plus modifications)
+* License: PSF-2.0
 * Note: This is in ``spack.util.unparse`` because it's very heavily
   modified, and we want to track coverage for it.
   Specifically, we have modified this library to generate consistent unparsed ASTs
@@ -41,6 +44,7 @@
 * Homepage: https://github.com/python-attrs/attrs
 * Usage: Needed by jsonschema.
 * Version: 22.1.0
+* License: MIT

 ctest_log_parser
 ----------------
@@ -48,6 +52,7 @@
 * Homepage: https://github.com/Kitware/CMake/blob/master/Source/CTest/cmCTestBuildHandler.cxx
 * Usage: Functions to parse build logs and extract error messages.
 * Version: Unversioned
+* License: BSD-3-Clause
 * Note: This is a homemade port of Kitware's CTest build handler.

 distro
@@ -56,6 +61,7 @@
 * Homepage: https://pypi.python.org/pypi/distro
 * Usage: Provides a more stable linux distribution detection.
 * Version: 1.8.0
+* License: Apache-2.0

 jinja2
 ------
@@ -63,6 +69,7 @@
 * Homepage: https://pypi.python.org/pypi/Jinja2
 * Usage: A modern and designer-friendly templating language for Python.
 * Version: 3.0.3 (last version supporting Python 3.6)
+* License: BSD-3-Clause

 jsonschema
 ----------
@@ -70,6 +77,7 @@
 * Homepage: https://pypi.python.org/pypi/jsonschema
 * Usage: An implementation of JSON Schema for Python.
 * Version: 3.2.0 (last version before 2.7 and 3.6 support was dropped)
+* License: MIT
 * Note: We don't include tests or benchmarks; just what Spack needs.

 macholib
@@ -78,6 +86,7 @@
 * Homepage: https://macholib.readthedocs.io/en/latest/index.html#
 * Usage: Manipulation of Mach-o binaries for relocating macOS buildcaches on Linux
 * Version: 1.16.2
+* License: MIT

 markupsafe
 ----------
@@ -85,6 +94,7 @@
 * Homepage: https://pypi.python.org/pypi/MarkupSafe
 * Usage: Implements a XML/HTML/XHTML Markup safe string for Python.
 * Version: 2.0.1 (last version supporting Python 3.6)
+* License: BSD-3-Clause

 pyrsistent
 ----------
@@ -92,6 +102,7 @@
 * Homepage: http://github.com/tobgu/pyrsistent/
 * Usage: Needed by `jsonschema`
 * Version: 0.18.0
+* License: MIT

 ruamel.yaml
 ------
@@ -101,6 +112,7 @@
   actively maintained and has more features, including round-tripping
   comments read from config files.
 * Version: 0.17.21
+* License: MIT

 six
 ---
@@ -108,5 +120,6 @@
 * Homepage: https://pypi.python.org/pypi/six
 * Usage: Python 2 and 3 compatibility utilities.
 * Version: 1.16.0
+* License: MIT

 """
@@ -764,7 +764,7 @@ def copy_tree(

     files = glob.glob(src)
     if not files:
-        raise OSError("No such file or directory: '{0}'".format(src))
+        raise OSError("No such file or directory: '{0}'".format(src), errno.ENOENT)

     # For Windows hard-links and junctions, the source path must exist to make a symlink. Add
     # all symlinks to this list while traversing the tree, then when finished, make all
@@ -15,7 +15,20 @@
 import typing
 import warnings
 from datetime import datetime, timedelta
-from typing import Callable, Dict, Iterable, List, Mapping, Optional, Tuple, TypeVar
+from typing import (
+    Any,
+    Callable,
+    Dict,
+    Generic,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+    Tuple,
+    TypeVar,
+    Union,
+)

 # Ignore emacs backups when listing modules
 ignore_modules = r"^\.#|~$"
@@ -424,46 +437,39 @@ def add_func_to_class(name, func):
     return cls


+K = TypeVar("K")
+V = TypeVar("V")
+
+
 @lazy_lexicographic_ordering
-class HashableMap(collections.abc.MutableMapping):
+class HashableMap(typing.MutableMapping[K, V]):
     """This is a hashable, comparable dictionary. Hash is performed on
     a tuple of the values in the dictionary."""

     __slots__ = ("dict",)

     def __init__(self):
-        self.dict = {}
+        self.dict: Dict[K, V] = {}

-    def __getitem__(self, key):
+    def __getitem__(self, key: K) -> V:
         return self.dict[key]

-    def __setitem__(self, key, value):
+    def __setitem__(self, key: K, value: V) -> None:
         self.dict[key] = value

-    def __iter__(self):
+    def __iter__(self) -> Iterator[K]:
         return iter(self.dict)

-    def __len__(self):
+    def __len__(self) -> int:
         return len(self.dict)

-    def __delitem__(self, key):
+    def __delitem__(self, key: K) -> None:
         del self.dict[key]

     def _cmp_iter(self):
         for _, v in sorted(self.items()):
             yield v

-    def copy(self):
-        """Type-agnostic clone method. Preserves subclass type."""
-        # Construct a new dict of my type
-        self_type = type(self)
-        clone = self_type()
-
-        # Copy everything from this dict into it.
-        for key in self:
-            clone[key] = self[key].copy()
-        return clone
-

 def match_predicate(*args):
     """Utility function for making string matching predicates.
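A quick sketch of how the now-generic map reads at a call site; runtime behavior is unchanged, only the static types are new:

.. code-block:: python

   from llnl.util.lang import HashableMap

   m: HashableMap[str, int] = HashableMap()
   m["a"] = 1

   # Keys iterate as str, values type-check as int.
   assert len(m) == 1 and m["a"] == 1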
@@ -1047,19 +1053,28 @@ def __exit__(self, exc_type, exc_value, tb):
         return True


-class classproperty:
+ClassPropertyType = TypeVar("ClassPropertyType")
+
+
+class classproperty(Generic[ClassPropertyType]):
     """Non-data descriptor to evaluate a class-level property. The function that performs
-    the evaluation is injected at creation time and take an instance (could be None) and
-    an owner (i.e. the class that originated the instance)
+    the evaluation is injected at creation time and takes an owner (i.e., the class that
+    originated the instance).
     """

-    def __init__(self, callback):
+    def __init__(self, callback: Callable[[Any], ClassPropertyType]) -> None:
         self.callback = callback

-    def __get__(self, instance, owner):
+    def __get__(self, instance, owner) -> ClassPropertyType:
         return self.callback(owner)


+#: A type alias that represents either a classproperty descriptor or a constant value of the same
+#: type. This allows derived classes to override a computed class-level property with a constant
+#: value while retaining type compatibility.
+ClassProperty = Union[ClassPropertyType, classproperty[ClassPropertyType]]
+
+
 class DeprecatedProperty:
     """Data descriptor to error or warn when a deprecated property is accessed.
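The point of the ``ClassProperty`` alias is that a subclass can override a computed class-level property with a plain constant and still satisfy the type checker. A minimal sketch (class and URL are illustrative):

.. code-block:: python

   from typing import Optional

   from llnl.util.lang import ClassProperty, classproperty

   def _default_url(cls) -> Optional[str]:
       # evaluated lazily against the owning class
       return f"https://example.org/{cls.__name__.lower()}"

   class Base:
       url: ClassProperty[Optional[str]] = classproperty(_default_url)

   class Child(Base):
       url = "https://example.org/child"  # constant override, same declared type

   assert Base.url == "https://example.org/base"
   assert Child.url == "https://example.org/child"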
@@ -18,7 +18,7 @@
 #: version is incremented when the package API is extended in a backwards-compatible way. The major
 #: version is incremented upon breaking changes. This version is changed independently from the
 #: Spack version.
-package_api_version = (1, 0)
+package_api_version = (2, 0)

 #: The minimum Package API version that this version of Spack is compatible with. This should
 #: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies
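Given the comment above, compatibility between a repository's requested API and a Spack release reduces to a tuple comparison. A sketch of such a check; the function is hypothetical, not Spack's actual API:

.. code-block:: python

   def is_api_compatible(repo_api, min_api, current_api) -> bool:
       # Hypothetical helper: a repo requiring vX.Y works if it falls in the
       # window [minimum supported, currently implemented], compared as tuples.
       return min_api <= repo_api <= current_api

   assert is_api_compatible((2, 0), (1, 0), (2, 0))
   assert not is_api_compatible((3, 0), (1, 0), (2, 0))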
@@ -574,12 +574,10 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
     module.make = DeprecatedExecutable(pkg.name, "make", "gmake")
     module.gmake = DeprecatedExecutable(pkg.name, "gmake", "gmake")
     module.ninja = DeprecatedExecutable(pkg.name, "ninja", "ninja")
-    # TODO: johnwparent: add package or builder support to define these build tools
-    # for now there is no entrypoint for builders to define these on their
-    # own

     if sys.platform == "win32":
-        module.nmake = Executable("nmake")
-        module.msbuild = Executable("msbuild")
+        module.nmake = DeprecatedExecutable(pkg.name, "nmake", "msvc")
+        module.msbuild = DeprecatedExecutable(pkg.name, "msbuild", "msvc")
         # analog to configure for win32
         module.cscript = Executable("cscript")
@@ -16,6 +16,7 @@
 import spack.package_base
 import spack.phase_callbacks
 import spack.spec
+import spack.util.environment
 import spack.util.prefix
 from spack.directives import build_system, conflicts, depends_on
 from spack.multimethod import when
@@ -846,7 +847,9 @@ def _remove_libtool_archives(self) -> None:
         with open(self._removed_la_files_log, mode="w", encoding="utf-8") as f:
             f.write("\n".join(libtool_files))

-    def setup_build_environment(self, env):
+    def setup_build_environment(
+        self, env: spack.util.environment.EnvironmentModifications
+    ) -> None:
         if self.spec.platform == "darwin" and macos_version() >= Version("11"):
             # Many configure files rely on matching '10.*' for macOS version
             # detection and fail to add flags if it shows as version 11.
@@ -8,6 +8,7 @@
 import spack.package_base
 import spack.phase_callbacks
 import spack.spec
+import spack.util.environment
 import spack.util.prefix
 from spack.directives import build_system, depends_on
 from spack.multimethod import when
@@ -86,7 +87,9 @@ def check_args(self):
         """Argument for ``cargo test`` during check phase"""
         return []

-    def setup_build_environment(self, env):
+    def setup_build_environment(
+        self, env: spack.util.environment.EnvironmentModifications
+    ) -> None:
         env.set("CARGO_HOME", self.stage.path)

     def build(
@@ -47,6 +47,11 @@ class CompilerPackage(spack.package_base.PackageBase):
     #: Relative path to compiler wrappers
     compiler_wrapper_link_paths: Dict[str, str] = {}

+    #: Optimization flags
+    opt_flags: Sequence[str] = []
+    #: Flags for generating debug information
+    debug_flags: Sequence[str] = []
+
     def __init__(self, spec: "spack.spec.Spec"):
         super().__init__(spec)
         msg = f"Supported languages for {spec} are not a subset of possible supported languages"
@@ -8,6 +8,7 @@
 import spack.package_base
 import spack.phase_callbacks
 import spack.spec
+import spack.util.environment
 import spack.util.prefix
 from spack.directives import build_system, depends_on
 from spack.multimethod import when
@@ -68,7 +69,9 @@ class GoBuilder(BuilderWithDefaults):
     #: Callback names for install-time test
     install_time_test_callbacks = ["check"]

-    def setup_build_environment(self, env):
+    def setup_build_environment(
+        self, env: spack.util.environment.EnvironmentModifications
+    ) -> None:
         env.set("GO111MODULE", "on")
         env.set("GOTOOLCHAIN", "local")
         env.set("GOPATH", fs.join_path(self.pkg.stage.path, "go"))
@@ -23,6 +23,7 @@

 import spack.error
 import spack.phase_callbacks
 import spack.spec
 from spack.build_environment import dso_suffix
 from spack.error import InstallError
 from spack.util.environment import EnvironmentModifications
@@ -1016,7 +1017,7 @@ def libs(self):
         debug_print(result)
         return result

-    def setup_run_environment(self, env):
+    def setup_run_environment(self, env: EnvironmentModifications) -> None:
         """Adds environment variables to the generated module file.

         These environment variables come from running:
@@ -1049,11 +1050,13 @@ def setup_run_environment(self, env):
             env.set("F77", self.prefix.bin.ifort)
             env.set("F90", self.prefix.bin.ifort)

-    def setup_dependent_build_environment(self, env, dependent_spec):
+    def setup_dependent_build_environment(
+        self, env: EnvironmentModifications, dependent_spec: spack.spec.Spec
+    ) -> None:
         # NB: This function is overwritten by 'mpi' provider packages:
         #
-        # var/spack/repos/builtin/packages/intel-mpi/package.py
-        # var/spack/repos/builtin/packages/intel-parallel-studio/package.py
+        # var/spack/repos/spack_repo/builtin/packages/intel_mpi/package.py
+        # var/spack/repos/spack_repo/builtin/packages/intel_parallel_studio/package.py
         #
         # They call _setup_dependent_env_callback() as well, but with the
         # dictionary kwarg compilers_of_client{} present and populated.
@@ -1061,7 +1064,12 @@ def setup_dependent_build_environment(self, env, dependent_spec):
         # Handle everything in a callback version.
         self._setup_dependent_env_callback(env, dependent_spec)

-    def _setup_dependent_env_callback(self, env, dependent_spec, compilers_of_client={}):
+    def _setup_dependent_env_callback(
+        self,
+        env: EnvironmentModifications,
+        dependent_spec: spack.spec.Spec,
+        compilers_of_client={},
+    ) -> None:
         # Expected to be called from a client's
         # setup_dependent_build_environment(),
         # with args extended to convey the client's compilers as needed.
@@ -8,6 +8,7 @@
 import spack.builder
 import spack.package_base
 import spack.spec
+import spack.util.environment
 import spack.util.executable
 import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
@@ -114,5 +115,7 @@ def install(
     def _luarocks_config_path(self):
         return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua")

-    def setup_build_environment(self, env):
+    def setup_build_environment(
+        self, env: spack.util.environment.EnvironmentModifications
+    ) -> None:
         env.set("LUAROCKS_CONFIG", self._luarocks_config_path())
@@ -4,6 +4,7 @@
 import spack.builder
 import spack.package_base
 import spack.spec
+import spack.util.environment
 import spack.util.prefix
 from spack.directives import build_system, extends
 from spack.multimethod import when
@@ -57,7 +58,9 @@ def install(
             "pkg prefix %s; pkg install %s" % (prefix, self.pkg.stage.archive_file),
         )

-    def setup_build_environment(self, env):
+    def setup_build_environment(
+        self, env: spack.util.environment.EnvironmentModifications
+    ) -> None:
         # octave does not like those environment variables to be set:
         env.unset("CC")
         env.unset("CXX")
@@ -106,8 +106,8 @@ def install_component(self, installer_path):

         bash = Executable("bash")

-        # Installer writes files in ~/intel set HOME so it goes to prefix
-        bash.add_default_env("HOME", self.prefix)
+        # Installer writes files in ~/intel set HOME so it goes to staging directory
+        bash.add_default_env("HOME", join_path(self.stage.path, "home"))
         # Installer checks $XDG_RUNTIME_DIR/.bootstrapper_lock_file as well
         bash.add_default_env("XDG_RUNTIME_DIR", join_path(self.stage.path, "runtime"))

@@ -132,7 +132,7 @@ def install_component(self, installer_path):
         if not isdir(install_dir):
             raise RuntimeError("install failed to directory: {0}".format(install_dir))

-    def setup_run_environment(self, env):
+    def setup_run_environment(self, env: EnvironmentModifications) -> None:
         """Adds environment variables to the generated module file.

         These environment variables come from running:
@@ -13,9 +13,9 @@
 import archspec

 import llnl.util.filesystem as fs
-import llnl.util.lang as lang
 import llnl.util.tty as tty
 from llnl.util.filesystem import HeaderList, LibraryList, join_path
+from llnl.util.lang import ClassProperty, classproperty, match_predicate

 import spack.builder
 import spack.config
@@ -139,7 +139,7 @@ def view_file_conflicts(self, view, merge_map):
         ext_map = view.extensions_layout.extension_map(self.extendee_spec)
         namespaces = set(x.package.py_namespace for x in ext_map.values())
         namespace_re = r"site-packages/{0}/__init__.py".format(self.py_namespace)
-        find_namespace = lang.match_predicate(namespace_re)
+        find_namespace = match_predicate(namespace_re)
         if self.py_namespace in namespaces:
             conflicts = list(x for x in conflicts if not find_namespace(x))

@@ -206,7 +206,7 @@ def remove_files_from_view(self, view, merge_map):
             spec.package.py_namespace for name, spec in ext_map.items() if name != self.name
         )
         if self.py_namespace in remaining_namespaces:
-            namespace_init = lang.match_predicate(
+            namespace_init = match_predicate(
                 r"site-packages/{0}/__init__.py".format(self.py_namespace)
             )
             ignore_namespace = True
@@ -324,6 +324,27 @@ def get_external_python_for_prefix(self):
     raise StopIteration("No external python could be detected for %s to depend on" % self.spec)


+def _homepage(cls: "PythonPackage") -> Optional[str]:
+    """Get the homepage from PyPI if available."""
+    if cls.pypi:
+        name = cls.pypi.split("/")[0]
+        return f"https://pypi.org/project/{name}/"
+    return None
+
+
+def _url(cls: "PythonPackage") -> Optional[str]:
+    if cls.pypi:
+        return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
+    return None
+
+
+def _list_url(cls: "PythonPackage") -> Optional[str]:
+    if cls.pypi:
+        name = cls.pypi.split("/")[0]
+        return f"https://pypi.org/simple/{name}/"
+    return None
+
+
 class PythonPackage(PythonExtension):
     """Specialized class for packages that are built using pip."""

@@ -351,25 +372,9 @@ class PythonPackage(PythonExtension):

     py_namespace: Optional[str] = None

-    @lang.classproperty
-    def homepage(cls) -> Optional[str]:  # type: ignore[override]
-        if cls.pypi:
-            name = cls.pypi.split("/")[0]
-            return f"https://pypi.org/project/{name}/"
-        return None
-
-    @lang.classproperty
-    def url(cls) -> Optional[str]:
-        if cls.pypi:
-            return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
-        return None
-
-    @lang.classproperty
-    def list_url(cls) -> Optional[str]:  # type: ignore[override]
-        if cls.pypi:
-            name = cls.pypi.split("/")[0]
-            return f"https://pypi.org/simple/{name}/"
-        return None
+    homepage: ClassProperty[Optional[str]] = classproperty(_homepage)
+    url: ClassProperty[Optional[str]] = classproperty(_url)
+    list_url: ClassProperty[Optional[str]] = classproperty(_list_url)

     @property
     def python_spec(self) -> Spec:
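Read against the ``pypi`` attribute, the three module-level helpers produce the usual PyPI URL family. A sketch of the values they yield for a hypothetical package:

.. code-block:: python

   class PyExample(PythonPackage):  # hypothetical package
       pypi = "example/example-1.0.tar.gz"

   # homepage -> https://pypi.org/project/example/
   # url      -> https://files.pythonhosted.org/packages/source/e/example/example-1.0.tar.gz
   # list_url -> https://pypi.org/simple/example/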
@@ -3,8 +3,8 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 from typing import Optional, Tuple

-import llnl.util.lang as lang
 from llnl.util.filesystem import mkdirp
+from llnl.util.lang import ClassProperty, classproperty

 from spack.directives import extends

@@ -54,6 +54,32 @@ def install(self, pkg, spec, prefix):
         pkg.module.R(*args)


+def _homepage(cls: "RPackage") -> Optional[str]:
+    if cls.cran:
+        return f"https://cloud.r-project.org/package={cls.cran}"
+    elif cls.bioc:
+        return f"https://bioconductor.org/packages/{cls.bioc}"
+    return None
+
+
+def _url(cls: "RPackage") -> Optional[str]:
+    if cls.cran:
+        return f"https://cloud.r-project.org/src/contrib/{cls.cran}_{str(list(cls.versions)[0])}.tar.gz"
+    return None
+
+
+def _list_url(cls: "RPackage") -> Optional[str]:
+    if cls.cran:
+        return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
+    return None
+
+
+def _git(cls: "RPackage") -> Optional[str]:
+    if cls.bioc:
+        return f"https://git.bioconductor.org/packages/{cls.bioc}"
+    return None
+
+
 class RPackage(Package):
     """Specialized class for packages that are built using R.

@@ -77,24 +103,7 @@ class RPackage(Package):

     extends("r")

-    @lang.classproperty
-    def homepage(cls):
-        if cls.cran:
-            return f"https://cloud.r-project.org/package={cls.cran}"
-        elif cls.bioc:
-            return f"https://bioconductor.org/packages/{cls.bioc}"
-
-    @lang.classproperty
-    def url(cls):
-        if cls.cran:
-            return f"https://cloud.r-project.org/src/contrib/{cls.cran}_{str(list(cls.versions)[0])}.tar.gz"
-
-    @lang.classproperty
-    def list_url(cls):
-        if cls.cran:
-            return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
-
-    @lang.classproperty
-    def git(cls):
-        if cls.bioc:
-            return f"https://git.bioconductor.org/packages/{cls.bioc}"
+    homepage: ClassProperty[Optional[str]] = classproperty(_homepage)
+    url: ClassProperty[Optional[str]] = classproperty(_url)
+    list_url: ClassProperty[Optional[str]] = classproperty(_list_url)
+    git: ClassProperty[Optional[str]] = classproperty(_git)
@@ -5,8 +5,8 @@
 from typing import Optional, Tuple

 import llnl.util.filesystem as fs
-import llnl.util.lang as lang
 import llnl.util.tty as tty
+from llnl.util.lang import ClassProperty, classproperty

 import spack.builder
 import spack.spec
@@ -19,6 +19,12 @@
 from spack.util.executable import Executable, ProcessError


+def _homepage(cls: "RacketPackage") -> Optional[str]:
+    if cls.racket_name:
+        return f"https://pkgs.racket-lang.org/package/{cls.racket_name}"
+    return None
+
+
 class RacketPackage(PackageBase):
     """Specialized class for packages that are built using Racket's
     `raco pkg install` and `raco setup` commands.
@@ -37,13 +43,7 @@ class RacketPackage(PackageBase):
     extends("racket", when="build_system=racket")

     racket_name: Optional[str] = None
     parallel = True

-    @lang.classproperty
-    def homepage(cls):
-        if cls.racket_name:
-            return "https://pkgs.racket-lang.org/package/{0}".format(cls.racket_name)
-        return None
+    homepage: ClassProperty[Optional[str]] = classproperty(_homepage)


 @spack.builder.builder("racket")
@@ -59,7 +59,7 @@ def __call__(self, spec, prefix):
 def get_builder_class(pkg, name: str) -> Optional[Type["Builder"]]:
     """Return the builder class if a package module defines it."""
     cls = getattr(pkg.module, name, None)
-    if cls and cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
+    if cls and spack.repo.is_package_module(cls.__module__):
         return cls
     return None

@@ -121,6 +121,7 @@ def __init__(self, wrapped_pkg_object, root_builder):
             new_cls_name,
             bases,
             {
+                "__module__": package_cls.__module__,
                 "run_tests": property(lambda x: x.wrapped_package_object.run_tests),
                 "test_requires_compiler": property(
                     lambda x: x.wrapped_package_object.test_requires_compiler
@@ -129,7 +130,6 @@ def __init__(self, wrapped_pkg_object, root_builder):
                 "tester": property(lambda x: x.wrapped_package_object.tester),
             },
         )
-        new_cls.__module__ = package_cls.__module__
         self.__class__ = new_cls
         self.__dict__.update(wrapped_pkg_object.__dict__)

@@ -185,10 +185,16 @@ def __init__(self, pkg):
         # These two methods don't follow the (self, spec, prefix) signature of phases nor
         # the (self) signature of methods, so they are added explicitly to avoid using a
         # catch-all (*args, **kwargs)
-        def setup_build_environment(self, env):
+        def setup_build_environment(
+            self, env: spack.util.environment.EnvironmentModifications
+        ) -> None:
             return self.pkg_with_dispatcher.setup_build_environment(env)

-        def setup_dependent_build_environment(self, env, dependent_spec):
+        def setup_dependent_build_environment(
+            self,
+            env: spack.util.environment.EnvironmentModifications,
+            dependent_spec: spack.spec.Spec,
+        ) -> None:
             return self.pkg_with_dispatcher.setup_dependent_build_environment(env, dependent_spec)

     return Adapter(pkg)
@@ -402,7 +408,7 @@ def fixup_install(self):
         # do something after the package is installed
         pass

-    def setup_build_environment(self, env):
+    def setup_build_environment(self, env: EnvironmentModifications) -> None:
         env.set("MY_ENV_VAR", "my_value")

     class CMakeBuilder(cmake.CMakeBuilder, AnyBuilder):
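For package authors, the practical effect of this typing pass is that environment-setup overrides now have a checked signature. A minimal sketch, assuming the usual package preamble (the package and variable names are illustrative):

.. code-block:: python

   from spack.util.environment import EnvironmentModifications

   class MyPkg(Package):  # hypothetical package
       def setup_build_environment(self, env: EnvironmentModifications) -> None:
           # modifications are recorded here and applied by Spack at build time
           env.set("MY_TOOL_HOME", self.prefix)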
@@ -24,6 +24,7 @@

 import spack
 import spack.binary_distribution as bindist
+import spack.builder
 import spack.config as cfg
 import spack.environment as ev
 import spack.error
@@ -149,10 +150,10 @@ def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
     return False


-def compute_affected_packages(rev1="HEAD^", rev2="HEAD"):
+def compute_affected_packages(rev1: str = "HEAD^", rev2: str = "HEAD") -> Set[str]:
     """Determine which packages were added, removed or changed
     between rev1 and rev2, and return the names as a set"""
-    return spack.repo.get_all_package_diffs("ARC", rev1=rev1, rev2=rev2)
+    return spack.repo.get_all_package_diffs("ARC", spack.repo.builtin_repo(), rev1=rev1, rev2=rev2)


 def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
@@ -613,32 +614,40 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
     job_spec, and attempts to copy the files into the directory given
     by job_log_dir.

-    Args:
+    Parameters:
         job_spec: spec associated with spack install log
         job_log_dir: path into which build log should be copied
     """
     tty.debug(f"job spec: {job_spec}")

-    try:
-        package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
-    except spack.error.SpackError as e:
-        tty.error(f"Cannot copy logs: {str(e)}")
+    if not job_spec.concrete:
+        tty.warn("Cannot copy artifacts for non-concrete specs")
         return

-    # Get the package's archived files
-    archive_files = []
-    archive_root = package_metadata_root / "archived-files"
-    if archive_root.is_dir():
-        archive_files = [f for f in archive_root.rglob("*") if f.is_file()]
-    else:
-        msg = "Cannot copy package archived files: archived-files must be a directory"
-        tty.warn(msg)
+    package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
+    if not os.path.isdir(package_metadata_root):
+        # Fallback to using the stage directory
+        job_pkg = job_spec.package
+
+        package_metadata_root = pathlib.Path(job_pkg.stage.path)
+        archive_files = spack.builder.create(job_pkg).archive_files
+        tty.warn("Package not installed, falling back to use stage dir")
+        tty.debug(f"stage dir: {package_metadata_root}")
+    else:
+        # Get the package's archived files
+        archive_files = []
+        archive_root = package_metadata_root / "archived-files"
+        if os.path.isdir(archive_root):
+            archive_files = [str(f) for f in archive_root.rglob("*") if os.path.isfile(f)]
+        else:
+            tty.debug(f"No archived files detected at {archive_root}")

     # Try zipped and unzipped versions of the build log
     build_log_zipped = package_metadata_root / "spack-build-out.txt.gz"
     build_log = package_metadata_root / "spack-build-out.txt"
     build_env_mods = package_metadata_root / "spack-build-env.txt"

-    for f in [build_log_zipped, build_env_mods, *archive_files]:
-        copy_files_to_artifacts(str(f), job_log_dir)
+    for f in [build_log_zipped, build_log, build_env_mods, *archive_files]:
+        copy_files_to_artifacts(str(f), job_log_dir, compress_artifacts=True)


 def copy_test_logs_to_artifacts(test_stage, job_test_dir):
@@ -651,11 +660,12 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
     """
     tty.debug(f"test stage: {test_stage}")
     if not os.path.exists(test_stage):
-        msg = f"Cannot copy test logs: job test stage ({test_stage}) does not exist"
-        tty.error(msg)
+        tty.error(f"Cannot copy test logs: job test stage ({test_stage}) does not exist")
         return

-    copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
+    copy_files_to_artifacts(
+        os.path.join(test_stage, "*", "*.txt"), job_test_dir, compress_artifacts=True
+    )


 def download_and_extract_artifacts(url, work_dir) -> str:
@@ -2,9 +2,13 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import copy
+import errno
+import glob
+import gzip
 import json
 import os
 import re
+import shutil
 import sys
 import time
 from collections import deque
@@ -25,6 +29,7 @@
 import spack.mirrors.mirror
 import spack.schema
 import spack.spec
+import spack.util.compression as compression
 import spack.util.spack_yaml as syaml
 import spack.util.url as url_util
 import spack.util.web as web_util
@@ -40,22 +45,67 @@
 _urlopen = web_util.urlopen


-def copy_files_to_artifacts(src, artifacts_dir):
+def copy_gzipped(glob_or_path: str, dest: str) -> None:
+    """Copy all of the files in the source glob/path to the destination.
+
+    Args:
+        glob_or_path: path to file to test
+        dest: destination path to copy to
+    """
+
+    files = glob.glob(glob_or_path)
+    if not files:
+        raise OSError("No such file or directory: '{0}'".format(glob_or_path), errno.ENOENT)
+    if len(files) > 1 and not os.path.isdir(dest):
+        raise ValueError(
+            "'{0}' matches multiple files but '{1}' is not a directory".format(glob_or_path, dest)
+        )
+
+    def is_gzipped(path):
+        with open(path, "rb") as fd:
+            return compression.GZipFileType().matches_magic(fd)
+
+    for src in files:
+        if is_gzipped(src):
+            fs.copy(src, dest)
+        else:
+            # Compress and copy in one step
+            src_name = os.path.basename(src)
+            if os.path.isdir(dest):
+                zipped = os.path.join(dest, f"{src_name}.gz")
+            elif not dest.endswith(".gz"):
+                zipped = f"{dest}.gz"
+            else:
+                zipped = dest
+
+            with open(src, "rb") as fin, gzip.open(zipped, "wb") as fout:
+                shutil.copyfileobj(fin, fout)
+
+
+def copy_files_to_artifacts(
+    src: str, artifacts_dir: str, *, compress_artifacts: bool = False
+) -> None:
     """
     Copy file(s) to the given artifacts directory

-    Parameters:
+    Args:
         src (str): the glob-friendly path expression for the file(s) to copy
         artifacts_dir (str): the destination directory
+        compress_artifacts (bool): option to compress copied artifacts using Gzip
     """
     try:
-        fs.copy(src, artifacts_dir)
+        if compress_artifacts:
+            copy_gzipped(src, artifacts_dir)
+        else:
+            fs.copy(src, artifacts_dir)
     except Exception as err:
-        msg = (
-            f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
-            f"exception: {str(err)}"
+        tty.warn(
+            (
+                f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
+                f"exception: {str(err)}"
+            )
         )
-        tty.warn(msg)


 def win_quote(quote_str: str) -> str:
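A usage sketch of the new keyword (paths illustrative): sources that are already gzipped are copied verbatim, everything else is compressed on the way into the artifacts directory:

.. code-block:: python

   # lands as /tmp/artifacts/spack-build-out.txt.gz
   copy_files_to_artifacts(
       "/tmp/stage/spack-build-out.txt", "/tmp/artifacts", compress_artifacts=True
   )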
@@ -4,12 +4,14 @@

 import argparse
 import difflib
+import functools
 import importlib
 import os
 import re
+import subprocess
 import sys
 from collections import Counter
-from typing import List, Optional, Union
+from typing import Callable, List, Optional, Union

 import llnl.string
 import llnl.util.tty as tty
@@ -30,6 +32,7 @@
 import spack.store
 import spack.traverse as traverse
 import spack.user_environment as uenv
+import spack.util.executable as exe
 import spack.util.spack_json as sjson
 import spack.util.spack_yaml as syaml

@@ -436,7 +439,7 @@ def display_specs(specs, args=None, **kwargs):
         all_headers (bool): show headers even when arch/compiler aren't defined
         status_fn (typing.Callable): if provided, prepend install-status info
         output (typing.IO): A file object to write to. Default is ``sys.stdout``
-
+        specfile_format (bool): specfile format of the current spec
     """

     def get_arg(name, default=None):
@@ -458,6 +461,7 @@ def get_arg(name, default=None):
     all_headers = get_arg("all_headers", False)
     output = get_arg("output", sys.stdout)
     status_fn = get_arg("status_fn", None)
+    specfile_format = get_arg("specfile_format", False)

     decorator = get_arg("decorator", None)
     if decorator is None:
@@ -479,6 +483,9 @@ def get_arg(name, default=None):
     vfmt = "{variants}" if variants else ""
     format_string = nfmt + "{@version}" + vfmt + ffmt

+    if specfile_format:
+        format_string = "[{specfile_version}] " + format_string
+
     def fmt(s, depth=0):
         """Formatter function for all output specs"""
         string = ""
@@ -719,3 +726,123 @@ def __init__(self, cmd_name):
         long_msg += "\n    ".join(similar)

         super().__init__(msg, long_msg)
+
+
+def find_pager(pager_candidates: List[str]) -> Optional[List[str]]:
+    """Find a pager from spack configuration.
+
+    Arguments:
+        pager_candidates: list of candidate commands with optional arguments, e.g. "less -FXRS"
+
+    Returns:
+        Arguments, including the found command, to launch the pager, or None if not found.
+    """
+    for pager in pager_candidates:
+        # split each string in the list of pagers into args
+        argv = pager.split()
+        if not argv:
+            continue
+
+        # try to find the requested pager command
+        command, *args = argv
+        path = exe.which_string(command)
+        if not path:
+            continue
+
+        # return the execv args we need to launch this thing
+        return [command] + args
+
+    return None
+
+
+def spack_pager_candidates() -> List[str]:
+    """Get a list of pager candidates by consulting environment and config.
+
+    Order of precedence is:
+
+    1. ``SPACK_PAGER``: pager just for spack
+    2. ``PAGER``: user's preferred pager, from their environment
+    3. ``config:pager``: list of pager candidates in config
+    """
+    pager_candidates = []
+
+    spack_pager = os.environ.get("SPACK_PAGER")
+    if spack_pager:
+        pager_candidates.append(spack_pager)
+
+    pager = os.environ.get("PAGER")
+    if pager:
+        pager_candidates.append(pager)
+
+    config_pagers = spack.config.get("config:pager")
+    if config_pagers:
+        pager_candidates.extend(config_pagers)
+
+    return pager_candidates
+
+
+def paged(command_function: Callable) -> Callable:
+    """Decorator for commands whose output should be sent to a pager by default.
+
+    This will launch a subprocess for, e.g., ``less``, and will redirect ``stdout`` to it, as
+    ``git`` does for commands like ``git log``.
+
+    The command will attempt to maintain colored output while paging, so you need a pager
+    that supports color, like ``less -R``. Spack defaults to using ``less -FXRS`` if it's
+    found, and nothing if not. You probably *do not* want to use ``more`` or any other
+    non-color-capable pager.
+    """
+    pager_execv_args = find_pager(spack_pager_candidates())
+    if not pager_execv_args:
+        return command_function
+
+    @functools.wraps(command_function)
+    def wrapper(*args, **kwargs):
+        # figure out if we're running the command with --help
+        is_help = False
+        if args and isinstance(args[-1], argparse.Namespace):
+            is_help = args[-1].help
+
+        # don't page if not a tty, and don't page help output
+        if not sys.stdout.isatty() or is_help:
+            return command_function(*args, **kwargs)
+
+        # Flush any buffered output before redirection.
+        sys.stdout.flush()
+
+        # save original stdout and original color setting
+        original_stdout_fd = os.dup(sys.stdout.fileno())
+        original_stdout_isatty = sys.stdout.isatty
+
+        # launch the pager
+        proc = subprocess.Popen(pager_execv_args, stdin=subprocess.PIPE)
+
+        try:
+            # Redirect stdout's file descriptor to the pager's stdin.
+            os.dup2(proc.stdin.fileno(), sys.stdout.fileno())
+
+            # make spack think the pager is a tty
+            sys.stdout.isatty = lambda: True
+
+            # run the decorated function
+            result = command_function(*args, **kwargs)
+
+            # Flush any remaining output.
+            sys.stdout.flush()
+
+            return result
+
+        finally:
+            # quit cheating on isatty
+            sys.stdout.isatty = original_stdout_isatty
+
+            # restore stdout
+            os.dup2(original_stdout_fd, sys.stdout.fileno())
+            os.close(original_stdout_fd)
+
+            # Close the pager's stdin and wait for it to finish.
+            proc.stdin.close()
+            proc.wait()
+
+    return wrapper
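Opting a command into the pager is then a one-line change, which is exactly the pattern the command-module hunks below apply. A sketch with a hypothetical command entry point:

.. code-block:: python

   import spack.cmd

   @spack.cmd.paged
   def my_listing(parser, args):  # hypothetical command
       for i in range(1000):
           print(f"line {i}")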
@@ -76,9 +76,6 @@ def setup_parser(subparser: argparse.ArgumentParser):
         default=False,
         help="regenerate buildcache index after building package(s)",
     )
-    push.add_argument(
-        "--spec-file", default=None, help="create buildcache entry for spec from json or yaml file"
-    )
     push.add_argument(
         "--only",
         default="package,dependencies",
@@ -192,28 +189,14 @@ def setup_parser(subparser: argparse.ArgumentParser):
         default=lambda: spack.config.default_modify_scope(),
         help="configuration scope containing mirrors to check",
     )
-    # Unfortunately there are 3 ways to do the same thing here:
-    check_specs = check.add_mutually_exclusive_group()
-    check_specs.add_argument(
-        "-s", "--spec", help="check single spec instead of release specs file"
-    )
-    check_specs.add_argument(
-        "--spec-file",
-        help="check single spec from json or yaml file instead of release specs file",
-    )

     arguments.add_common_arguments(check, ["specs"])

     check.set_defaults(func=check_fn)

     # Download tarball and specfile
     download = subparsers.add_parser("download", help=download_fn.__doc__)
-    download_spec_or_specfile = download.add_mutually_exclusive_group(required=True)
-    download_spec_or_specfile.add_argument(
-        "-s", "--spec", help="download built tarball for spec from mirror"
-    )
-    download_spec_or_specfile.add_argument(
-        "--spec-file", help="download built tarball for spec (from json or yaml file) from mirror"
-    )
+    download.add_argument("-s", "--spec", help="download built tarball for spec from mirror")
     download.add_argument(
         "-p",
         "--path",
@@ -223,28 +206,10 @@ def setup_parser(subparser: argparse.ArgumentParser):
     )
     download.set_defaults(func=download_fn)

-    # Get buildcache name
-    getbuildcachename = subparsers.add_parser(
-        "get-buildcache-name", help=get_buildcache_name_fn.__doc__
-    )
-    getbuildcachename_spec_or_specfile = getbuildcachename.add_mutually_exclusive_group(
-        required=True
-    )
-    getbuildcachename_spec_or_specfile.add_argument(
-        "-s", "--spec", help="spec string for which buildcache name is desired"
-    )
-    getbuildcachename_spec_or_specfile.add_argument(
-        "--spec-file", help="path to spec json or yaml file for which buildcache name is desired"
-    )
-    getbuildcachename.set_defaults(func=get_buildcache_name_fn)
-
     # Given the root spec, save the yaml of the dependent spec to a file
     savespecfile = subparsers.add_parser("save-specfile", help=save_specfile_fn.__doc__)
     savespecfile_spec_or_specfile = savespecfile.add_mutually_exclusive_group(required=True)
     savespecfile_spec_or_specfile.add_argument("--root-spec", help="root spec of dependent spec")
-    savespecfile_spec_or_specfile.add_argument(
-        "--root-specfile", help="path to json or yaml file containing root spec of dependent spec"
-    )
     savespecfile.add_argument(
         "-s",
         "--specs",
@@ -380,14 +345,8 @@ def _specs_to_be_packaged(

 def push_fn(args):
     """create a binary package and push it to a mirror"""
-    if args.spec_file:
-        tty.warn(
-            "The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
-            "Use positional arguments instead."
-        )
-
-    if args.specs or args.spec_file:
-        roots = _matching_specs(spack.cmd.parse_specs(args.specs or args.spec_file))
+    if args.specs:
+        roots = _matching_specs(spack.cmd.parse_specs(args.specs))
     else:
         roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()

@@ -529,22 +488,7 @@ def check_fn(args: argparse.Namespace):
     this command uses the process exit code to indicate its result, specifically, if the
     exit code is non-zero, then at least one of the indicated specs needs to be rebuilt
     """
-    if args.spec_file:
-        specs_arg = (
-            args.spec_file if os.path.sep in args.spec_file else os.path.join(".", args.spec_file)
-        )
-        tty.warn(
-            "The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
-            f"Use `spack buildcache check {specs_arg}` instead."
-        )
-    elif args.spec:
-        specs_arg = args.spec
-        tty.warn(
-            "The flag `--spec` is deprecated and will be removed in Spack 0.23. "
-            f"Use `spack buildcache check {specs_arg}` instead."
-        )
-    else:
-        specs_arg = args.specs
+    specs_arg = args.specs

     if specs_arg:
         specs = _matching_specs(spack.cmd.parse_specs(specs_arg))
@@ -578,13 +522,7 @@ def download_fn(args):
     code indicates that the command failed to download at least one of the required buildcache
     components
     """
-    if args.spec_file:
-        tty.warn(
-            "The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
-            "Use --spec instead."
-        )
-
-    specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
+    specs = _matching_specs(spack.cmd.parse_specs(args.spec))

     if len(specs) != 1:
         tty.die("a single spec argument is required to download from a buildcache")
@@ -593,15 +531,6 @@ def download_fn(args):
         sys.exit(1)


-def get_buildcache_name_fn(args):
-    """get name (prefix) of buildcache entries for this spec"""
-    tty.warn("This command is deprecated and will be removed in Spack 0.22.")
-    specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
-    if len(specs) != 1:
-        tty.die("a single spec argument is required to get buildcache name")
-    print(bindist.tarball_name(specs[0], ""))
-
-
 def save_specfile_fn(args):
     """get full spec for dependencies and write them to files in the specified output directory

@@ -609,13 +538,7 @@ def save_specfile_fn(args):
     successful. if any errors or exceptions are encountered, or if expected command-line arguments
     are not provided, then the exit code will be non-zero
     """
-    if args.root_specfile:
-        tty.warn(
-            "The flag `--root-specfile` is deprecated and will be removed in Spack 0.22. "
-            "Use --root-spec instead."
-        )
-
-    specs = spack.cmd.parse_specs(args.root_spec or args.root_specfile)
+    specs = spack.cmd.parse_specs(args.root_spec)

     if len(specs) != 1:
         tty.die("a single spec argument is required to save specfile")
@@ -453,7 +453,7 @@ def ci_rebuild(args):

     # Arguments when installing the root from sources
     deps_install_args = install_args + ["--only=dependencies"]
-    root_install_args = install_args + ["--only=package"]
+    root_install_args = install_args + ["--keep-stage", "--only=package"]

     if cdash_handler:
         # Add additional arguments to `spack install` for CDash reporting.
@@ -493,6 +493,9 @@ def ci_rebuild(args):
     # Copy logs and archived files from the install metadata (.spack) directory to artifacts now
     spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)

+    # Clear the stage directory
+    spack.stage.purge()
+
     # If the installation succeeded and we're running stand-alone tests for
     # the package, run them and copy the output. Failures of any kind should
     # *not* terminate the build process or preclude creating the build cache.
@@ -788,7 +791,9 @@ def ci_verify_versions(args):
     """
     # Get a list of all packages that have been changed or added
     # between from_ref and to_ref
-    pkgs = spack.repo.get_all_package_diffs("AC", args.from_ref, args.to_ref)
+    pkgs = spack.repo.get_all_package_diffs(
+        "AC", spack.repo.builtin_repo(), args.from_ref, args.to_ref
+    )

     failed_version = False
     for pkg_name in pkgs:
@@ -63,7 +63,7 @@ def setup_parser(subparser):
     )

     # List
-    list_parser = sp.add_parser("list", help="list available compilers")
+    list_parser = sp.add_parser("list", aliases=["ls"], help="list available compilers")
     list_parser.add_argument(
         "--scope", action=arguments.ConfigScope, help="configuration scope to read from"
     )
@@ -216,5 +216,6 @@ def compiler(parser, args):
         "rm": compiler_remove,
         "info": compiler_info,
         "list": compiler_list,
+        "ls": compiler_list,
     }
     action[args.compiler_command](args)
@@ -10,6 +10,7 @@
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty

+import spack.cmd
 import spack.config
 import spack.environment as ev
 import spack.error
@@ -169,6 +170,7 @@ def print_flattened_configuration(*, blame: bool) -> None:
     syaml.dump_config(flattened, stream=sys.stdout, default_flow_style=False, blame=blame)


+@spack.cmd.paged
 def config_get(args):
     """Dump merged YAML configuration for a specific section.

@@ -178,6 +180,7 @@ def config_get(args):
     print_configuration(args, blame=False)


+@spack.cmd.paged
 def config_blame(args):
     """Print out line-by-line blame of merged YAML."""
     print_configuration(args, blame=True)
@@ -23,7 +23,7 @@
 from spack.util.editor import editor
 from spack.util.executable import which
 from spack.util.format import get_version_lines
-from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name
+from spack.util.naming import pkg_name_to_class_name, simplify_name

 description = "create a new package file"
 section = "packaging"
@@ -95,7 +95,7 @@ class BundlePackageTemplate:

     def __init__(self, name: str, versions, languages: List[str]):
         self.name = name
-        self.class_name = mod_to_class(name)
+        self.class_name = pkg_name_to_class_name(name)
         self.versions = versions
         self.languages = languages

@@ -874,7 +874,7 @@ def get_name(name, url):

     result = simplify_name(result)

-    if not valid_fully_qualified_module_name(result):
+    if not re.match(r"^[a-z0-9-]+$", result):
         tty.die("Package name can only contain a-z, 0-9, and '-'")

     return result
@@ -102,7 +102,7 @@ def assure_concrete_spec(env: spack.environment.Environment, spec: spack.spec.Sp
         )
     else:
         # look up the maximum version so infinity versions are preferred for develop
-        version = max(spec.package_class.versions.keys())
+        version = max(spack.repo.PATH.get_pkg_class(spec.fullname).versions.keys())
         tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
     spec.versions = spack.version.VersionList([version])
@@ -62,7 +62,7 @@ def setup_parser(subparser):
         "package Spack knows how to find."
     )

-    sp.add_parser("list", help="list detectable packages, by repository and name")
+    sp.add_parser("list", aliases=["ls"], help="list detectable packages, by repository and name")

     read_cray_manifest = sp.add_parser(
         "read-cray-manifest",
@@ -259,6 +259,7 @@ def external(parser, args):
     action = {
         "find": external_find,
         "list": external_list,
+        "ls": external_list,
         "read-cray-manifest": external_read_cray_manifest,
     }
     action[args.external_command](args)
@@ -51,6 +51,12 @@ def setup_parser(subparser):
         "-I", "--install-status", action="store_true", help="show install status of packages"
     )

+    subparser.add_argument(
+        "--specfile-format",
+        action="store_true",
+        help="show the specfile format for installed deps ",
+    )
+
     subparser.add_argument(
         "-d", "--deps", action="store_true", help="output dependencies along with found specs"
     )
@@ -280,6 +286,7 @@ def root_decorator(spec, string):
         show_flags=True,
         decorator=root_decorator,
         variants=True,
+        specfile_format=args.specfile_format,
     )

     print()
@@ -301,6 +308,7 @@ def root_decorator(spec, string):
         namespace=True,
         show_flags=True,
         variants=True,
+        specfile_format=args.specfile_format,
     )
     print()

@@ -355,6 +363,7 @@ def _find_query(args, env):
     return results, concretized_but_not_installed


+@cmd.paged
 def find(parser, args):
     env = ev.active_environment()

@@ -390,7 +399,12 @@ def find(parser, args):
     if args.show_concretized:
         display_results += concretized_but_not_installed
     cmd.display_specs(
-        display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn
+        display_results,
+        args,
+        decorator=decorator,
+        all_headers=True,
+        status_fn=status_fn,
+        specfile_format=args.specfile_format,
     )

     # print number of installed packages last (as the list may be long)
@@ -11,6 +11,7 @@
 from llnl.util.tty.colify import colify

 import spack.builder
+import spack.cmd
 import spack.deptypes as dt
 import spack.fetch_strategy as fs
 import spack.install_test
@@ -481,6 +482,7 @@ def print_licenses(pkg, args):
         color.cprint(line)


+@spack.cmd.paged
 def info(parser, args):
     spec = spack.spec.Spec(args.package)
     pkg_cls = spack.repo.PATH.get_pkg_class(spec.fullname)
@@ -10,11 +10,14 @@
 import re
 import sys
 from html import escape
+from typing import Type

 import llnl.util.tty as tty
 from llnl.util.tty.colify import colify

+import spack.cmd
 import spack.deptypes as dt
+import spack.package_base
 import spack.repo
 from spack.cmd.common import arguments
 from spack.version import VersionList
@@ -139,10 +142,10 @@ def name_only(pkgs, out):
     tty.msg("%d packages" % len(pkgs))


-def github_url(pkg):
+def github_url(pkg: Type[spack.package_base.PackageBase]) -> str:
     """Link to a package file on github."""
-    url = "https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/{0}/package.py"
-    return url.format(pkg.name)
+    mod_path = pkg.__module__.replace(".", "/")
+    return f"https://github.com/spack/spack/blob/develop/var/spack/{mod_path}.py"


 def rows_for_ncols(elts, ncols):
@@ -313,6 +316,7 @@ def head(n, span_id, title, anchor=None):
     out.write("</div>\n")


+@spack.cmd.paged
 def list(parser, args):
     # retrieve the formatter to use from args
     formatter = formatters[args.format]
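Deriving the link from ``__module__`` keeps it valid for the new ``spack_repo`` layout. A sketch of the computation for an illustrative module path:

.. code-block:: python

   mod_path = "spack_repo.builtin.packages.zlib.package".replace(".", "/")
   print(f"https://github.com/spack/spack/blob/develop/var/spack/{mod_path}.py")
   # -> .../var/spack/spack_repo/builtin/packages/zlib/package.py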
@@ -89,17 +89,17 @@ def setup_parser(subparser):

 def pkg_add(args):
     """add a package to the git stage with `git add`"""
-    spack.repo.add_package_to_git_stage(args.packages)
+    spack.repo.add_package_to_git_stage(args.packages, spack.repo.builtin_repo())


 def pkg_list(args):
     """list packages associated with a particular spack git revision"""
-    colify(spack.repo.list_packages(args.rev))
+    colify(spack.repo.list_packages(args.rev, spack.repo.builtin_repo()))


 def pkg_diff(args):
     """compare packages available in two different git revisions"""
-    u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
+    u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo())

     if u1:
         print("%s:" % args.rev1)
@@ -114,21 +114,23 @@ def pkg_diff(args):

 def pkg_removed(args):
     """show packages removed since a commit"""
-    u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
+    u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo())
     if u1:
         colify(sorted(u1))


 def pkg_added(args):
     """show packages added since a commit"""
-    u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
+    u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo())
     if u2:
         colify(sorted(u2))


 def pkg_changed(args):
     """show packages changed since a commit"""
-    packages = spack.repo.get_all_package_diffs(args.type, args.rev1, args.rev2)
+    packages = spack.repo.get_all_package_diffs(
+        args.type, spack.repo.builtin_repo(), args.rev1, args.rev2
+    )

     if packages:
         colify(sorted(packages))
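With the repo argument now explicit, the same helpers can target any repository object; `spack pkg` just passes the builtin repo. A usage sketch (revisions are placeholders):

    import spack.repo

    repo = spack.repo.builtin_repo()
    removed, added = spack.repo.diff_packages("HEAD~10", "HEAD", repo)
    changed = spack.repo.get_all_package_diffs("AC", repo, "HEAD~10", "HEAD")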
@@ -4,6 +4,7 @@

 import os
 import sys
+from typing import List

 import llnl.util.tty as tty

@@ -24,9 +25,7 @@ def setup_parser(subparser):
     create_parser = sp.add_parser("create", help=repo_create.__doc__)
     create_parser.add_argument("directory", help="directory to create the repo in")
     create_parser.add_argument(
-        "namespace",
-        help="namespace to identify packages in the repository (defaults to the directory name)",
-        nargs="?",
+        "namespace", help="name or namespace to identify packages in the repository"
     )
     create_parser.add_argument(
         "-d",
@@ -138,7 +137,7 @@ def repo_remove(args):
 def repo_list(args):
     """show registered repositories and their namespaces"""
     roots = spack.config.get("repos", scope=args.scope)
-    repos = []
+    repos: List[spack.repo.Repo] = []
     for r in roots:
         try:
             repos.append(spack.repo.from_path(r))
@@ -146,17 +145,14 @@ def repo_list(args):
             continue

     if sys.stdout.isatty():
-        msg = "%d package repositor" % len(repos)
-        msg += "y." if len(repos) == 1 else "ies."
-        tty.msg(msg)
+        tty.msg(f"{len(repos)} package repositor" + ("y." if len(repos) == 1 else "ies."))

     if not repos:
         return

     max_ns_len = max(len(r.namespace) for r in repos)
     for repo in repos:
-        fmt = "%%-%ds%%s" % (max_ns_len + 4)
-        print(fmt % (repo.namespace, repo.root))
+        print(f"{repo.namespace:<{max_ns_len + 4}}{repo.package_api_str:<8}{repo.root}")


 def repo(parser, args):
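The new `repo_list` line relies on nested f-string field widths to align columns, and adds a package-API column. A standalone illustration with made-up values:

    namespace, api_str, root = "builtin", "v2.0", "/home/user/spack_repo/builtin"
    max_ns_len = len(namespace)
    print(f"{namespace:<{max_ns_len + 4}}{api_str:<8}{root}")
    # builtin    v2.0    /home/user/spack_repo/builtin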
@@ -136,20 +136,7 @@ def solve(parser, args):
     setup_only = set(show) == {"asp"}
     unify = spack.config.get("concretizer:unify")
     allow_deprecated = spack.config.get("config:deprecated", False)
-    if unify != "when_possible":
-        # set up solver parameters
-        # Note: reuse and other concretizer prefs are passed as configuration
-        result = solver.solve(
-            specs,
-            out=output,
-            timers=args.timers,
-            stats=args.stats,
-            setup_only=setup_only,
-            allow_deprecated=allow_deprecated,
-        )
-        if not setup_only:
-            _process_result(result, show, required_format, kwargs)
-    else:
+    if unify == "when_possible":
         for idx, result in enumerate(
             solver.solve_in_rounds(
                 specs,
@@ -166,3 +153,29 @@ def solve(parser, args):
             print("% END ROUND {0}\n".format(idx))
         if not setup_only:
             _process_result(result, show, required_format, kwargs)
+    elif unify:
+        # set up solver parameters
+        # Note: reuse and other concretizer prefs are passed as configuration
+        result = solver.solve(
+            specs,
+            out=output,
+            timers=args.timers,
+            stats=args.stats,
+            setup_only=setup_only,
+            allow_deprecated=allow_deprecated,
+        )
+        if not setup_only:
+            _process_result(result, show, required_format, kwargs)
+    else:
+        for spec in specs:
+            tty.msg("SOLVING SPEC:", spec)
+            result = solver.solve(
+                [spec],
+                out=output,
+                timers=args.timers,
+                stats=args.stats,
+                setup_only=setup_only,
+                allow_deprecated=allow_deprecated,
+            )
+            if not setup_only:
+                _process_result(result, show, required_format, kwargs)
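The restructured branches now map one-to-one onto the allowed values of `concretizer:unify` ("when_possible", true, false). A condensed sketch of the new control flow, with bodies elided:

    import spack.config

    unify = spack.config.get("concretizer:unify")  # as in the hunk
    if unify == "when_possible":
        ...  # solver.solve_in_rounds(specs, ...): one result per round
    elif unify:
        ...  # solver.solve(specs, ...): all roots solved together
    else:
        ...  # solver.solve([spec], ...) per spec: independent solves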
@@ -59,7 +59,7 @@ def is_package(f):
     packages, since we allow `from spack import *` and poking globals
     into packages.
     """
-    return f.startswith("var/spack/repos/") and f.endswith("package.py")
+    return f.startswith("var/spack/") and f.endswith("package.py")


 #: decorator for adding tools to the list
@@ -380,7 +380,7 @@ def run_black(black_cmd, file_list, args):
 def _module_part(root: str, expr: str):
     parts = expr.split(".")
     # spack.pkg is for repositories, don't try to resolve it here.
-    if ".".join(parts[:2]) == spack.repo.ROOT_PYTHON_NAMESPACE:
+    if expr.startswith(spack.repo.PKG_MODULE_PREFIX_V1) or expr == "spack.pkg":
         return None
     while parts:
         f1 = os.path.join(root, "lib", "spack", *parts) + ".py"
@@ -18,6 +18,10 @@ class Languages(enum.Enum):


 class CompilerAdaptor:
+    """Provides access to compiler attributes via `Package.compiler`. Useful for
+    packages which do not yet access compiler properties via `self.spec[language]`.
+    """
+
     def __init__(
         self, compiled_spec: spack.spec.Spec, compilers: Dict[Languages, spack.spec.Spec]
     ) -> None:
@@ -79,6 +83,14 @@ def implicit_rpaths(self) -> List[str]:
             result.extend(CompilerPropertyDetector(compiler).implicit_rpaths())
         return result

+    @property
+    def opt_flags(self) -> List[str]:
+        return next(iter(self.compilers.values())).package.opt_flags
+
+    @property
+    def debug_flags(self) -> List[str]:
+        return next(iter(self.compilers.values())).package.debug_flags
+
     @property
     def openmp_flag(self) -> str:
         return next(iter(self.compilers.values())).package.openmp_flag
@@ -140,7 +152,7 @@ def c17_flag(self) -> str:
     @property
     def c23_flag(self) -> str:
         return self.compilers[Languages.C].package.standard_flag(
-            language=Languages.C.value, standard="17"
+            language=Languages.C.value, standard="23"
         )

     @property
@@ -190,6 +202,10 @@ def f77(self):
         self._lang_exists_or_raise("f77", lang=Languages.FORTRAN)
         return self.compilers[Languages.FORTRAN].package.fortran

+    @property
+    def stdcxx_libs(self):
+        return self._maybe_return_attribute("stdcxx_libs", lang=Languages.CXX)
+

 class DeprecatedCompiler(lang.DeprecatedProperty):
     def __init__(self) -> None:
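Besides fixing `c23_flag` (which previously requested `standard="17"`), this hunk adds `opt_flags`, `debug_flags`, and `stdcxx_libs` so legacy `self.compiler` users keep working. A usage sketch (the `pkg` argument is a hypothetical package instance):

    def report_compiler_flags(pkg):
        # pkg: assumed spack.package_base.PackageBase instance using self.compiler
        print(pkg.compiler.opt_flags)    # new in this hunk
        print(pkg.compiler.debug_flags)  # new in this hunk
        print(pkg.compiler.stdcxx_libs)  # new in this hunk, C++ compiler only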
@@ -149,12 +149,12 @@ def _getfqdn():
     return socket.getfqdn()


-def reader(version: vn.ConcreteVersion) -> Type["spack.spec.SpecfileReaderBase"]:
+def reader(version: vn.StandardVersion) -> Type["spack.spec.SpecfileReaderBase"]:
     reader_cls = {
-        vn.Version("5"): spack.spec.SpecfileV1,
-        vn.Version("6"): spack.spec.SpecfileV3,
-        vn.Version("7"): spack.spec.SpecfileV4,
-        vn.Version("8"): spack.spec.SpecfileV5,
+        vn.StandardVersion.from_string("5"): spack.spec.SpecfileV1,
+        vn.StandardVersion.from_string("6"): spack.spec.SpecfileV3,
+        vn.StandardVersion.from_string("7"): spack.spec.SpecfileV4,
+        vn.StandardVersion.from_string("8"): spack.spec.SpecfileV5,
     }
     return reader_cls[version]

@@ -824,7 +824,7 @@ def check(cond, msg):
     db = fdata["database"]
     check("version" in db, "no 'version' in JSON DB.")

-    self.db_version = vn.Version(db["version"])
+    self.db_version = vn.StandardVersion.from_string(db["version"])
     if self.db_version > _DB_VERSION:
         raise InvalidDatabaseVersionError(self, _DB_VERSION, self.db_version)
     elif self.db_version < _DB_VERSION:
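The key change is that lookups are now typed as `StandardVersion` rather than going through the general `vn.Version` factory, so the dictionary keys and the parsed database version always compare as the same concrete type. A short sketch, assuming the same `vn` alias as the hunk:

    import spack.version as vn  # assumed alias, matching the hunk

    key = vn.StandardVersion.from_string("8")  # replaces vn.Version("8") as the dict key
    # reader(key) then returns spack.spec.SpecfileV5 per the table above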
@@ -20,7 +20,7 @@
 import sys
 from typing import Dict, List, Optional, Set, Tuple, Union

-import llnl.util.tty
+from llnl.util import tty

 import spack.config
 import spack.error
@@ -93,14 +93,13 @@ def _spec_is_valid(spec: spack.spec.Spec) -> bool:
     except spack.error.SpackError:
         # It is assumed here that we can at least extract the package name from the spec so we
         # can look up the implementation of determine_spec_details
-        msg = f"Constructed spec for {spec.name} does not have a string representation"
-        llnl.util.tty.warn(msg)
+        tty.warn(f"Constructed spec for {spec.name} does not have a string representation")
         return False

     try:
         spack.spec.Spec(str(spec))
     except spack.error.SpackError:
-        llnl.util.tty.warn(
+        tty.warn(
             "Constructed spec has a string representation but the string"
             " representation does not evaluate to a valid spec: {0}".format(str(spec))
         )
@@ -109,20 +108,24 @@ def _spec_is_valid(spec: spack.spec.Spec) -> bool:
     return True


-def path_to_dict(search_paths: List[str]):
+def path_to_dict(search_paths: List[str]) -> Dict[str, str]:
     """Return dictionary[fullpath]: basename from list of paths"""
-    path_to_lib = {}
+    path_to_lib: Dict[str, str] = {}
     # Reverse order of search directories so that a lib in the first
     # entry overrides later entries
     for search_path in reversed(search_paths):
         try:
-            with os.scandir(search_path) as entries:
-                path_to_lib.update(
-                    {entry.path: entry.name for entry in entries if entry.is_file()}
-                )
+            dir_iter = os.scandir(search_path)
         except OSError as e:
-            msg = f"cannot scan '{search_path}' for external software: {str(e)}"
-            llnl.util.tty.debug(msg)
+            tty.debug(f"cannot scan '{search_path}' for external software: {e}")
             continue
+        with dir_iter as entries:
+            for entry in entries:
+                try:
+                    if entry.is_file():
+                        path_to_lib[entry.path] = entry.name
+                except OSError as e:
+                    tty.debug(f"cannot scan '{search_path}' for external software: {e}")

     return path_to_lib
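The rewrite exists because `entry.is_file()` can itself raise `OSError` (e.g. on a broken network mount), and the old dict comprehension aborted the whole directory scan on the first bad entry. A minimal standalone illustration of the per-entry error handling:

    import os

    def scan_dir(path):
        files = {}
        try:
            it = os.scandir(path)
        except OSError:
            return files  # directory unreadable: skip it entirely
        with it as entries:
            for entry in entries:
                try:
                    if entry.is_file():
                        files[entry.path] = entry.name
                except OSError:
                    continue  # one bad entry no longer aborts the scan
        return files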
@@ -65,7 +65,7 @@ def __init__(cls: "DirectiveMeta", name: str, bases: tuple, attr_dict: dict):
     # The instance is being initialized: if it is a package we must ensure
     # that the directives are called to set it up.

-    if cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
+    if spack.repo.is_package_module(cls.__module__):
         # Ensure the presence of the dictionaries associated with the directives.
         # All dictionaries are defaultdicts that create lists for missing keys.
         for d in DirectiveMeta._directive_dict_names:
@@ -144,7 +144,6 @@ class Foo(Package):
     Package class, and it's how Spack gets information from the
     packages to the core.
     """
-    global directive_names

     if isinstance(dicts, str):
         dicts = (dicts,)
@@ -31,7 +31,6 @@
 import spack.repo
 import spack.schema.env
 import spack.spec
-import spack.spec_list
 import spack.store
 import spack.user_environment as uenv
 import spack.util.environment
@@ -44,10 +43,10 @@
 from spack.installer import PackageInstaller
 from spack.schema.env import TOP_LEVEL_KEY
 from spack.spec import Spec
-from spack.spec_list import SpecList
 from spack.util.path import substitute_path_variables

 from ..enums import ConfigScopePriority
+from .list import SpecList, SpecListError, SpecListParser

 SpecPair = spack.concretize.SpecPair

@@ -932,8 +931,10 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
         self.new_specs: List[Spec] = []
         self.views: Dict[str, ViewDescriptor] = {}

+        #: Parser for spec lists
+        self._spec_lists_parser = SpecListParser()
         #: Specs from "spack.yaml"
-        self.spec_lists: Dict[str, SpecList] = {user_speclist_name: SpecList()}
+        self.spec_lists: Dict[str, SpecList] = {}
         #: User specs from the last concretization
         self.concretized_user_specs: List[Spec] = []
         #: Roots associated with the last concretization, in order
@@ -1001,26 +1002,6 @@ def write_transaction(self):
         """Get a write lock context manager for use in a `with` block."""
         return lk.WriteTransaction(self.txlock, acquire=self._re_read)

-    def _process_definition(self, entry):
-        """Process a single spec definition item."""
-        when_string = entry.get("when")
-        if when_string is not None:
-            when = spack.spec.eval_conditional(when_string)
-            assert len([x for x in entry if x != "when"]) == 1
-        else:
-            when = True
-            assert len(entry) == 1
-
-        if when:
-            for name, spec_list in entry.items():
-                if name == "when":
-                    continue
-                user_specs = SpecList(name, spec_list, self.spec_lists.copy())
-                if name in self.spec_lists:
-                    self.spec_lists[name].extend(user_specs)
-                else:
-                    self.spec_lists[name] = user_specs
-
     def _process_view(self, env_view: Optional[Union[bool, str, Dict]]):
         """Process view option(s), which can be boolean, string, or None.

@@ -1082,21 +1063,24 @@ def _process_concrete_includes(self):

     def _construct_state_from_manifest(self):
         """Set up user specs and views from the manifest file."""
-        self.spec_lists = collections.OrderedDict()
         self.views = {}
+        self._sync_speclists()
+        self._process_view(spack.config.get("view", True))
+        self._process_concrete_includes()

-        for item in spack.config.get("definitions", []):
-            self._process_definition(item)
+    def _sync_speclists(self):
+        self.spec_lists = {}
+        self.spec_lists.update(
+            self._spec_lists_parser.parse_definitions(
+                data=spack.config.CONFIG.get("definitions", [])
+            )
+        )

         env_configuration = self.manifest[TOP_LEVEL_KEY]
         spec_list = env_configuration.get(user_speclist_name, [])
-        user_specs = SpecList(
-            user_speclist_name, [s for s in spec_list if s], self.spec_lists.copy()
-        )
-        self.spec_lists[user_speclist_name] = user_specs
-
-        self._process_view(spack.config.get("view", True))
-        self._process_concrete_includes()
+        self.spec_lists[user_speclist_name] = self._spec_lists_parser.parse_user_specs(
+            name=user_speclist_name, yaml_list=spec_list
+        )

     def all_concretized_user_specs(self) -> List[Spec]:
         """Returns all of the concretized user specs of the environment and
@@ -1167,9 +1151,7 @@ def clear(self, re_read=False):
             re_read: If ``True``, do not clear ``new_specs``. This value cannot be read from yaml,
                 and needs to be maintained when re-reading an existing environment.
         """
-        self.spec_lists = collections.OrderedDict()
-        self.spec_lists[user_speclist_name] = SpecList()
-
+        self.spec_lists = {}
         self._dev_specs = {}
         self.concretized_order = []  # roots of last concretize, in order
         self.concretized_user_specs = []  # user specs from last concretize
@@ -1276,22 +1258,6 @@ def destroy(self):
         """Remove this environment from Spack entirely."""
         shutil.rmtree(self.path)

-    def update_stale_references(self, from_list=None):
-        """Iterate over spec lists updating references."""
-        if not from_list:
-            from_list = next(iter(self.spec_lists.keys()))
-        index = list(self.spec_lists.keys()).index(from_list)
-
-        # spec_lists is an OrderedDict to ensure lists read from the manifest
-        # are maintainted in order, hence, all list entries after the modified
-        # list may refer to the modified list requiring stale references to be
-        # updated.
-        for i, (name, speclist) in enumerate(
-            list(self.spec_lists.items())[index + 1 :], index + 1
-        ):
-            new_reference = dict((n, self.spec_lists[n]) for n in list(self.spec_lists.keys())[:i])
-            speclist.update_reference(new_reference)
-
     def add(self, user_spec, list_name=user_speclist_name):
         """Add a single user_spec (non-concretized) to the Environment

@@ -1311,18 +1277,17 @@ def add(self, user_spec, list_name=user_speclist_name):
         elif not spack.repo.PATH.exists(spec.name) and not spec.abstract_hash:
             virtuals = spack.repo.PATH.provider_index.providers.keys()
             if spec.name not in virtuals:
-                msg = "no such package: %s" % spec.name
-                raise SpackEnvironmentError(msg)
+                raise SpackEnvironmentError(f"no such package: {spec.name}")

         list_to_change = self.spec_lists[list_name]
         existing = str(spec) in list_to_change.yaml_list
         if not existing:
             list_to_change.add(str(spec))
-            self.update_stale_references(list_name)
             if list_name == user_speclist_name:
                 self.manifest.add_user_spec(str(user_spec))
             else:
                 self.manifest.add_definition(str(user_spec), list_name=list_name)
+            self._sync_speclists()

         return bool(not existing)

@@ -1366,18 +1331,17 @@ def change_existing_spec(
                 "There are no specs named {0} in {1}".format(match_spec.name, list_name)
             )
         elif len(matches) > 1 and not allow_changing_multiple_specs:
-            raise ValueError("{0} matches multiple specs".format(str(match_spec)))
+            raise ValueError(f"{str(match_spec)} matches multiple specs")

         for idx, spec in matches:
             override_spec = Spec.override(spec, change_spec)
             self.spec_lists[list_name].replace(idx, str(override_spec))
             if list_name == user_speclist_name:
                 self.manifest.override_user_spec(str(override_spec), idx=idx)
             else:
                 self.manifest.override_definition(
                     str(spec), override=str(override_spec), list_name=list_name
                 )
-        self.update_stale_references(from_list=list_name)
+        self._sync_speclists()

     def remove(self, query_spec, list_name=user_speclist_name, force=False):
         """Remove specs from an environment that match a query_spec"""
@@ -1405,22 +1369,17 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
             raise SpackEnvironmentError(f"{err_msg_header}, no spec matches")

         old_specs = set(self.user_specs)
-        new_specs = set()
-
         # Remove specs from the appropriate spec list
         for spec in matches:
             if spec not in list_to_change:
                 continue
             try:
                 list_to_change.remove(spec)
-                self.update_stale_references(list_name)
-                new_specs = set(self.user_specs)
-            except spack.spec_list.SpecListError as e:
-                # define new specs list
+                new_specs = set(self.user_specs)
+            except SpecListError as e:
                 msg = str(e)
                 if force:
                     msg += " It will be removed from the concrete specs."
                     # Mock new specs, so we can remove this spec from concrete spec lists
                     new_specs.remove(spec)
                 tty.warn(msg)
             else:
                 if list_name == user_speclist_name:
@@ -1428,7 +1387,11 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
             else:
                 self.manifest.remove_definition(str(spec), list_name=list_name)

-        # If force, update stale concretized specs
+        # Recompute "definitions" and user specs
+        self._sync_speclists()
+        new_specs = set(self.user_specs)
+
+        # If 'force', update stale concretized specs
         for spec in old_specs - new_specs:
             if force and spec in self.concretized_user_specs:
                 i = self.concretized_user_specs.index(spec)
@@ -1642,23 +1605,6 @@ def _concretize_separately(self, tests=False):

         # Unify the specs objects, so we get correct references to all parents
         self._read_lockfile_dict(self._to_lockfile_dict())

-        # Re-attach information on test dependencies
-        if tests:
-            # This is slow, but the information on test dependency is lost
-            # after unification or when reading from a lockfile.
-            for h in self.specs_by_hash:
-                current_spec, computed_spec = self.specs_by_hash[h], by_hash[h]
-                for node in computed_spec.traverse():
-                    test_edges = node.edges_to_dependencies(depflag=dt.TEST)
-                    for current_edge in test_edges:
-                        test_dependency = current_edge.spec
-                        if test_dependency in current_spec[node.name]:
-                            continue
-                        current_spec[node.name].add_dependency_edge(
-                            test_dependency.copy(), depflag=dt.TEST, virtuals=current_edge.virtuals
-                        )
-
         return concretized_specs

     @property
@@ -2366,8 +2312,12 @@ def update_environment_repository(self) -> None:

     def _add_to_environment_repository(self, spec_node: Spec) -> None:
         """Add the root node of the spec to the environment repository"""
-        repository_dir = os.path.join(self.repos_path, spec_node.namespace)
-        repository = spack.repo.create_or_construct(repository_dir, spec_node.namespace)
+        namespace: str = spec_node.namespace
+        repository = spack.repo.create_or_construct(
+            root=os.path.join(self.repos_path, namespace),
+            namespace=namespace,
+            package_api=spack.repo.PATH.get_repo(namespace).package_api,
+        )
         pkg_dir = repository.dirname_for_package_name(spec_node.name)
         fs.mkdirp(pkg_dir)
         spack.repo.PATH.dump_provenance(spec_node, pkg_dir)
@@ -2827,6 +2777,8 @@ def add_definition(self, user_spec: str, list_name: str) -> None:
                 item[list_name].append(user_spec)
                 break

+        # "definitions" can be remote, so we need to update the global config too
+        spack.config.CONFIG.set("definitions", defs, scope=self.scope_name)
         self.changed = True

     def remove_definition(self, user_spec: str, list_name: str) -> None:
@@ -2853,6 +2805,8 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
         except ValueError:
             pass

+        # "definitions" can be remote, so we need to update the global config too
+        spack.config.CONFIG.set("definitions", defs, scope=self.scope_name)
         self.changed = True

     def override_definition(self, user_spec: str, *, override: str, list_name: str) -> None:
@@ -2878,6 +2832,8 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
         except ValueError:
             pass

+        # "definitions" can be remote, so we need to update the global config too
+        spack.config.CONFIG.set("definitions", defs, scope=self.scope_name)
         self.changed = True

     def _iterate_on_definitions(self, definitions, *, list_name, err_msg):
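The environment now rebuilds every spec list from scratch via `_sync_speclists` instead of patching stale references in place. A sketch of the new parsing flow, using the parser class added in lib/spack/spack/environment/list.py below (the definition data is hypothetical):

    from spack.environment.list import SpecListParser

    parser = SpecListParser()
    definitions = parser.parse_definitions(
        data=[{"compilers": ["gcc@12", "clang@16"]}]  # hypothetical "definitions" entry
    )
    user_specs = parser.parse_user_specs(name="specs", yaml_list=["hdf5", "$compilers"])
    # user_specs.specs_as_yaml_list == ["hdf5", "gcc@12", "clang@16"]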
lib/spack/spack/environment/list.py (new file, 286 lines)
@@ -0,0 +1,286 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools
from typing import Any, Dict, List, NamedTuple, Optional, Union

import spack.spec
import spack.util.spack_yaml
import spack.variant
from spack.error import SpackError
from spack.spec import Spec


class SpecList:
    def __init__(self, *, name: str = "specs", yaml_list=None, expanded_list=None):
        self.name = name
        self.yaml_list = yaml_list[:] if yaml_list is not None else []
        # Expansions can be expensive to compute and difficult to keep updated
        # We cache results and invalidate when self.yaml_list changes
        self.specs_as_yaml_list = expanded_list or []
        self._constraints = None
        self._specs: Optional[List[Spec]] = None

    @property
    def is_matrix(self):
        for item in self.specs_as_yaml_list:
            if isinstance(item, dict):
                return True
        return False

    @property
    def specs_as_constraints(self):
        if self._constraints is None:
            constraints = []
            for item in self.specs_as_yaml_list:
                if isinstance(item, dict):  # matrix of specs
                    constraints.extend(_expand_matrix_constraints(item))
                else:  # individual spec
                    constraints.append([Spec(item)])
            self._constraints = constraints

        return self._constraints

    @property
    def specs(self) -> List[Spec]:
        if self._specs is None:
            specs: List[Spec] = []
            # This could be slightly faster done directly from yaml_list,
            # but this way is easier to maintain.
            for constraint_list in self.specs_as_constraints:
                spec = constraint_list[0].copy()
                for const in constraint_list[1:]:
                    spec.constrain(const)
                specs.append(spec)
            self._specs = specs

        return self._specs

    def add(self, spec: Spec):
        spec_str = str(spec)
        self.yaml_list.append(spec_str)

        # expanded list can be updated without invalidation
        if self.specs_as_yaml_list is not None:
            self.specs_as_yaml_list.append(spec_str)

        # Invalidate cache variables when we change the list
        self._constraints = None
        self._specs = None

    def remove(self, spec):
        # Get spec to remove from list
        remove = [
            s
            for s in self.yaml_list
            if (isinstance(s, str) and not s.startswith("$")) and Spec(s) == Spec(spec)
        ]
        if not remove:
            msg = f"Cannot remove {spec} from SpecList {self.name}.\n"
            msg += f"Either {spec} is not in {self.name} or {spec} is "
            msg += "expanded from a matrix and cannot be removed directly."
            raise SpecListError(msg)

        # Remove may contain more than one string representation of the same spec
        for item in remove:
            self.yaml_list.remove(item)
            self.specs_as_yaml_list.remove(item)

        # invalidate cache variables when we change the list
        self._constraints = None
        self._specs = None

    def extend(self, other: "SpecList", copy_reference=True) -> None:
        self.yaml_list.extend(other.yaml_list)
        self.specs_as_yaml_list.extend(other.specs_as_yaml_list)
        self._constraints = None
        self._specs = None

    def __len__(self):
        return len(self.specs)

    def __getitem__(self, key):
        return self.specs[key]

    def __iter__(self):
        return iter(self.specs)


def _expand_matrix_constraints(matrix_config):
    # recurse so we can handle nested matrices
    expanded_rows = []
    for row in matrix_config["matrix"]:
        new_row = []
        for r in row:
            if isinstance(r, dict):
                # Flatten the nested matrix into a single row of constraints
                new_row.extend(
                    [
                        [" ".join([str(c) for c in expanded_constraint_list])]
                        for expanded_constraint_list in _expand_matrix_constraints(r)
                    ]
                )
            else:
                new_row.append([r])
        expanded_rows.append(new_row)

    excludes = matrix_config.get("exclude", [])  # only compute once
    sigil = matrix_config.get("sigil", "")

    results = []
    for combo in itertools.product(*expanded_rows):
        # Construct a combined spec to test against excludes
        flat_combo = [Spec(constraint) for constraints in combo for constraint in constraints]

        test_spec = flat_combo[0].copy()
        for constraint in flat_combo[1:]:
            test_spec.constrain(constraint)

        # Abstract variants don't have normal satisfaction semantics
        # Convert all variants to concrete types.
        # This method is best effort, so all existing variants will be
        # converted before any error is raised.
        # Catch exceptions because we want to be able to operate on
        # abstract specs without needing package information
        try:
            spack.spec.substitute_abstract_variants(test_spec)
        except spack.variant.UnknownVariantError:
            pass

        # Resolve abstract hashes for exclusion criteria
        if any(test_spec.lookup_hash().satisfies(x) for x in excludes):
            continue

        if sigil:
            flat_combo[0] = Spec(sigil + str(flat_combo[0]))

        # Add to list of constraints
        results.append(flat_combo)

    return results


def _sigilify(item, sigil):
    if isinstance(item, dict):
        if sigil:
            item["sigil"] = sigil
        return item
    else:
        return sigil + item


class Definition(NamedTuple):
    name: str
    yaml_list: List[Union[str, Dict]]
    when: Optional[str]


class SpecListParser:
    """Parse definitions and user specs from data in environments"""

    def __init__(self):
        self.definitions: Dict[str, SpecList] = {}

    def parse_definitions(self, *, data: List[Dict[str, Any]]) -> Dict[str, SpecList]:
        definitions_from_yaml: Dict[str, List[Definition]] = {}
        for item in data:
            value = self._parse_yaml_definition(item)
            definitions_from_yaml.setdefault(value.name, []).append(value)

        self.definitions = {}
        self._build_definitions(definitions_from_yaml)
        return self.definitions

    def parse_user_specs(self, *, name, yaml_list) -> SpecList:
        definition = Definition(name=name, yaml_list=yaml_list, when=None)
        return self._speclist_from_definitions(name, [definition])

    def _parse_yaml_definition(self, yaml_entry) -> Definition:
        when_string = yaml_entry.get("when")

        if (when_string and len(yaml_entry) > 2) or (not when_string and len(yaml_entry) > 1):
            mark = spack.util.spack_yaml.get_mark_from_yaml_data(yaml_entry)
            attributes = ", ".join(x for x in yaml_entry if x != "when")
            error_msg = f"definition must have a single attribute, got many: {attributes}"
            raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")

        for name, yaml_list in yaml_entry.items():
            if name == "when":
                continue
            return Definition(name=name, yaml_list=yaml_list, when=when_string)

        # If we are here, it means only "when" is in the entry
        mark = spack.util.spack_yaml.get_mark_from_yaml_data(yaml_entry)
        error_msg = "definition must have a single attribute, got none"
        raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")

    def _build_definitions(self, definitions_from_yaml: Dict[str, List[Definition]]):
        for name, definitions in definitions_from_yaml.items():
            self.definitions[name] = self._speclist_from_definitions(name, definitions)

    def _speclist_from_definitions(self, name, definitions) -> SpecList:
        combined_yaml_list = []
        for def_part in definitions:
            if def_part.when is not None and not spack.spec.eval_conditional(def_part.when):
                continue
            combined_yaml_list.extend(def_part.yaml_list)
        expanded_list = self._expand_yaml_list(combined_yaml_list)
        return SpecList(name=name, yaml_list=combined_yaml_list, expanded_list=expanded_list)

    def _expand_yaml_list(self, raw_yaml_list):
        result = []
        for item in raw_yaml_list:
            if isinstance(item, str) and item.startswith("$"):
                result.extend(self._expand_reference(item))
                continue

            value = item
            if isinstance(item, dict):
                value = self._expand_yaml_matrix(item)
            result.append(value)
        return result

    def _expand_reference(self, item: str):
        sigil, name = "", item[1:]
        if name.startswith("^") or name.startswith("%"):
            sigil, name = name[0], name[1:]

        if name not in self.definitions:
            mark = spack.util.spack_yaml.get_mark_from_yaml_data(item)
            error_msg = f"trying to expand the name '{name}', which is not defined yet"
            raise UndefinedReferenceError(f"{mark.name}:{mark.line + 1}: {error_msg}")

        value = self.definitions[name].specs_as_yaml_list
        if not sigil:
            return value
        return [_sigilify(x, sigil) for x in value]

    def _expand_yaml_matrix(self, matrix_yaml):
        extra_attributes = set(matrix_yaml) - {"matrix", "exclude"}
        if extra_attributes:
            mark = spack.util.spack_yaml.get_mark_from_yaml_data(matrix_yaml)
            error_msg = f"extra attributes in spec matrix: {','.join(sorted(extra_attributes))}"
            raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")

        if "matrix" not in matrix_yaml:
            mark = spack.util.spack_yaml.get_mark_from_yaml_data(matrix_yaml)
            error_msg = "matrix is missing the 'matrix' attribute"
            raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")

        # Assume data has been validated against the YAML schema
        result = {"matrix": [self._expand_yaml_list(row) for row in matrix_yaml["matrix"]]}
        if "exclude" in matrix_yaml:
            result["exclude"] = matrix_yaml["exclude"]
        return result


class SpecListError(SpackError):
    """Error class for all errors related to SpecList objects."""


class UndefinedReferenceError(SpecListError):
    """Error class for undefined references in Spack stacks."""


class InvalidSpecConstraintError(SpecListError):
    """Error class for invalid spec constraints at concretize time."""
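For intuition about `_expand_matrix_constraints`, here is an illustrative input with hypothetical specs:

    matrix_config = {
        "matrix": [["hdf5", "zlib"], ["%gcc", "%clang"]],
        "exclude": ["zlib%clang"],
    }
    # itertools.product over the two rows yields four combinations; the exclude
    # filter drops zlib%clang, leaving constraint lists equivalent to:
    #   [Spec("hdf5"), Spec("%gcc")], [Spec("hdf5"), Spec("%clang")],
    #   [Spec("zlib"), Spec("%gcc")]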
@@ -49,10 +49,23 @@ def activate_header(env, shell, prompt=None, view: Optional[str] = None):
         cmds += 'set "SPACK_ENV=%s"\n' % env.path
         if view:
             cmds += 'set "SPACK_ENV_VIEW=%s"\n' % view
+        if prompt:
+            old_prompt = os.environ.get("SPACK_OLD_PROMPT")
+            if not old_prompt:
+                old_prompt = os.environ.get("PROMPT")
+                cmds += f'set "SPACK_OLD_PROMPT={old_prompt}"\n'
+            cmds += f'set "PROMPT={prompt} $P$G"\n'
     elif shell == "pwsh":
         cmds += "$Env:SPACK_ENV='%s'\n" % env.path
         if view:
             cmds += "$Env:SPACK_ENV_VIEW='%s'\n" % view
+        if prompt:
+            cmds += (
+                "function global:prompt { $pth = $(Convert-Path $(Get-Location))"
+                ' | Split-Path -leaf; if(!"$Env:SPACK_OLD_PROMPT") '
+                '{$Env:SPACK_OLD_PROMPT="[spack] PS $pth>"}; '
+                '"%s PS $pth>"}\n' % prompt
+            )
     else:
         bash_color_prompt = colorize(f"@G{{{prompt}}}", color=True, enclose=True)
         zsh_color_prompt = colorize(f"@G{{{prompt}}}", color=True, enclose=False, zsh=True)
@@ -107,10 +120,19 @@ def deactivate_header(shell):
         cmds += 'set "SPACK_ENV="\n'
         cmds += 'set "SPACK_ENV_VIEW="\n'
         # TODO: despacktivate
-        # TODO: prompt
+        old_prompt = os.environ.get("SPACK_OLD_PROMPT")
+        if old_prompt:
+            cmds += f'set "PROMPT={old_prompt}"\n'
+        cmds += 'set "SPACK_OLD_PROMPT="\n'
     elif shell == "pwsh":
         cmds += "Set-Item -Path Env:SPACK_ENV\n"
         cmds += "Set-Item -Path Env:SPACK_ENV_VIEW\n"
+        cmds += (
+            "function global:prompt { $pth = $(Convert-Path $(Get-Location))"
+            ' | Split-Path -leaf; $spack_prompt = "[spack] $pth >"; '
+            'if("$Env:SPACK_OLD_PROMPT") {$spack_prompt=$Env:SPACK_OLD_PROMPT};'
+            " $spack_prompt}\n"
+        )
     else:
         cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n"
         cmds += "unset SPACK_ENV; export SPACK_ENV;\n"
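For the batch (cmd.exe) branch, the generated activation lines stash the previous prompt in SPACK_OLD_PROMPT so deactivation can restore it. An illustration with hypothetical values:

    prompt, old_prompt = "[myenv]", "$P$G"  # hypothetical values
    cmds = f'set "SPACK_OLD_PROMPT={old_prompt}"\n'
    cmds += f'set "PROMPT={prompt} $P$G"\n'
    # deactivation later emits: set "PROMPT=$P$G" and set "SPACK_OLD_PROMPT="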
@@ -27,11 +27,14 @@
 import os
 import re
 import shutil
 import sys
+import time
 import urllib.error
 import urllib.parse
+import urllib.request
+import urllib.response
 from pathlib import PurePath
-from typing import List, Optional
+from typing import Callable, List, Mapping, Optional

 import llnl.url
 import llnl.util
@@ -219,6 +222,114 @@ def mirror_id(self):
     """BundlePackages don't have a mirror id."""


+def _format_speed(total_bytes: int, elapsed: float) -> str:
+    """Return a human-readable average download speed string."""
+    elapsed = 1 if elapsed <= 0 else elapsed  # avoid divide by zero
+    speed = total_bytes / elapsed
+    if speed >= 1e9:
+        return f"{speed / 1e9:6.1f} GB/s"
+    elif speed >= 1e6:
+        return f"{speed / 1e6:6.1f} MB/s"
+    elif speed >= 1e3:
+        return f"{speed / 1e3:6.1f} KB/s"
+    return f"{speed:6.1f} B/s"
+
+
+def _format_bytes(total_bytes: int) -> str:
+    """Return a human-readable total bytes string."""
+    if total_bytes >= 1e9:
+        return f"{total_bytes / 1e9:7.2f} GB"
+    elif total_bytes >= 1e6:
+        return f"{total_bytes / 1e6:7.2f} MB"
+    elif total_bytes >= 1e3:
+        return f"{total_bytes / 1e3:7.2f} KB"
+    return f"{total_bytes:7.2f} B"
+
+
+class FetchProgress:
+    #: Characters to rotate in the spinner.
+    spinner = ["|", "/", "-", "\\"]
+
+    def __init__(
+        self,
+        total_bytes: Optional[int] = None,
+        enabled: bool = True,
+        get_time: Callable[[], float] = time.time,
+    ) -> None:
+        """Initialize a FetchProgress instance.
+
+        Args:
+            total_bytes: Total number of bytes to download, if known.
+            enabled: Whether to print progress information.
+            get_time: Function to get the current time."""
+        #: Number of bytes downloaded so far.
+        self.current_bytes = 0
+        #: Delta time between progress prints
+        self.delta = 0.1
+        #: Whether to print progress information.
+        self.enabled = enabled
+        #: Function to get the current time.
+        self.get_time = get_time
+        #: Time of last progress print to limit output
+        self.last_printed = 0.0
+        #: Time of start of download
+        self.start_time = get_time() if enabled else 0.0
+        #: Total number of bytes to download, if known.
+        self.total_bytes = total_bytes if total_bytes and total_bytes > 0 else 0
+        #: Index of spinner character to print (used if total bytes is unknown)
+        self.index = 0
+
+    @classmethod
+    def from_headers(
+        cls,
+        headers: Mapping[str, str],
+        enabled: bool = True,
+        get_time: Callable[[], float] = time.time,
+    ) -> "FetchProgress":
+        """Create a FetchProgress instance from HTTP headers."""
+        # headers.get is case-insensitive if it's from a HTTPResponse object.
+        content_length = headers.get("Content-Length")
+        try:
+            total_bytes = int(content_length) if content_length else None
+        except ValueError:
+            total_bytes = None
+        return cls(total_bytes=total_bytes, enabled=enabled, get_time=get_time)
+
+    def advance(self, num_bytes: int, out=sys.stdout) -> None:
+        if not self.enabled:
+            return
+        self.current_bytes += num_bytes
+        self.print(out=out)
+
+    def print(self, final: bool = False, out=sys.stdout) -> None:
+        if not self.enabled:
+            return
+        current_time = self.get_time()
+        if self.last_printed + self.delta < current_time or final:
+            self.last_printed = current_time
+            # print a newline if this is the final update
+            maybe_newline = "\n" if final else ""
+            # if we know the total bytes, show a percentage, otherwise a spinner
+            if self.total_bytes > 0:
+                percentage = min(100 * self.current_bytes / self.total_bytes, 100.0)
+                percent_or_spinner = f"[{percentage:3.0f}%] "
+            else:
+                # only show the spinner if we are not at 100%
+                if final:
+                    percent_or_spinner = "[100%] "
+                else:
+                    percent_or_spinner = f"[ {self.spinner[self.index]} ] "
+                    self.index = (self.index + 1) % len(self.spinner)
+
+            print(
+                f"\r {percent_or_spinner}{_format_bytes(self.current_bytes)} "
+                f"@ {_format_speed(self.current_bytes, current_time - self.start_time)}"
+                f"{maybe_newline}",
+                end="",
+                flush=True,
+                file=out,
+            )
+
+
 @fetcher
 class URLFetchStrategy(FetchStrategy):
     """URLFetchStrategy pulls source code from a URL for an archive, check the
@@ -316,7 +427,7 @@ def _check_headers(self, headers):
             tty.warn(msg)

     @_needs_stage
-    def _fetch_urllib(self, url):
+    def _fetch_urllib(self, url, chunk_size=65536):
         save_file = self.stage.save_filename

         request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})
@@ -327,8 +438,15 @@ def _fetch_urllib(self, url):
         try:
             response = web_util.urlopen(request)
             tty.msg(f"Fetching {url}")
+            progress = FetchProgress.from_headers(response.headers, enabled=sys.stdout.isatty())
             with open(save_file, "wb") as f:
-                shutil.copyfileobj(response, f)
+                while True:
+                    chunk = response.read(chunk_size)
+                    if not chunk:
+                        break
+                    f.write(chunk)
+                    progress.advance(len(chunk))
+            progress.print(final=True)
         except OSError as e:
             # clean up archive on failure.
             if self.archive_file:
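A usage sketch of the progress meter with an injected clock for determinism (values are made up; `get_time` exists precisely so tests can do this):

    clock = iter([0.0, 0.2, 0.4, 0.6])
    progress = FetchProgress(
        total_bytes=1_000_000, enabled=True, get_time=lambda: next(clock)
    )
    progress.advance(250_000)   # prints "[ 25%] ..." with bytes and speed
    progress.advance(750_000)   # prints "[100%] ..."
    progress.print(final=True)  # repeats the final line with a trailing newline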
@@ -566,10 +566,11 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
             tty.warn(f"Warning: Couldn't copy in provenance for {node.name}")

         # Create a destination repository
-        dest_repo_root = os.path.join(path, node.namespace)
-        if not os.path.exists(dest_repo_root):
-            spack.repo.create_repo(dest_repo_root)
-        repo = spack.repo.from_path(dest_repo_root)
+        pkg_api = spack.repo.PATH.get_repo(node.namespace).package_api
+        repo_root = os.path.join(path, node.namespace) if pkg_api < (2, 0) else path
+        repo = spack.repo.create_or_construct(
+            repo_root, namespace=node.namespace, package_api=pkg_api
+        )

         # Get the location of the package in the dest repo.
         dest_pkg_dir = repo.dirname_for_package_name(node.name)
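The root selection reflects the v2 layout, where the namespace directory lives inside a shared root instead of being the root itself. Isolated, with hypothetical values:

    import os

    pkg_api, path, namespace = (2, 0), "/tmp/provenance", "builtin"  # hypothetical
    repo_root = os.path.join(path, namespace) if pkg_api < (2, 0) else path
    # -> "/tmp/provenance" for v2; "/tmp/provenance/builtin" for v1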
@@ -20,6 +20,7 @@
 import signal
 import subprocess as sp
 import sys
+import tempfile
 import traceback
 import warnings
 from typing import List, Tuple
@@ -41,6 +42,7 @@
 import spack.paths
 import spack.platforms
 import spack.repo
+import spack.solver.asp
 import spack.spec
 import spack.store
 import spack.util.debug
@@ -372,6 +374,12 @@ def make_argument_parser(**kwargs):
         choices=("always", "never", "auto"),
         help="when to colorize output (default: auto)",
     )
+    parser.add_argument(
+        "--no-pager",
+        action="store_true",
+        default=False,
+        help="do not run any output through a pager",
+    )
     parser.add_argument(
         "-c",
         "--config",
@@ -534,6 +542,10 @@ def setup_main_options(args):
     if args.timestamp:
         tty.set_timestamp(True)

+    # override pager configuration (note ::)
+    if args.no_pager:
+        spack.config.set("config::pager", [], scope="command_line")
+
     # override lock configuration if passed on command line
     if args.locks is not None:
         if args.locks is False:
@@ -1046,6 +1058,10 @@ def main(argv=None):
     try:
         return _main(argv)

+    except spack.solver.asp.OutputDoesNotSatisfyInputError as e:
+        _handle_solver_bug(e)
+        return 1
+
     except spack.error.SpackError as e:
         tty.debug(e)
         e.die()  # gracefully die on any SpackErrors
@@ -1069,5 +1085,45 @@ def main(argv=None):
         return 3


+def _handle_solver_bug(
+    e: spack.solver.asp.OutputDoesNotSatisfyInputError, out=sys.stderr, root=None
+) -> None:
+    # when the solver outputs specs that do not satisfy the input and spack is used as a command
+    # line tool, we dump the incorrect output specs to json so users can upload them in bug reports
+    wrong_output = [(input, output) for input, output in e.input_to_output if output is not None]
+    no_output = [input for input, output in e.input_to_output if output is None]
+    if no_output:
+        tty.error(
+            "internal solver error: the following specs were not solved:\n - "
+            + "\n - ".join(str(s) for s in no_output),
+            stream=out,
+        )
+    if wrong_output:
+        msg = (
+            "internal solver error: the following specs were concretized, but do not satisfy the "
+            "input:\n - "
+            + "\n - ".join(str(s) for s, _ in wrong_output)
+            + "\n Please report a bug at https://github.com/spack/spack/issues"
+        )
+        # try to write the input/output specs to a temporary directory for bug reports
+        try:
+            tmpdir = tempfile.mkdtemp(prefix="spack-asp-", dir=root)
+            files = []
+            for i, (input, output) in enumerate(wrong_output, start=1):
+                in_file = os.path.join(tmpdir, f"input-{i}.json")
+                out_file = os.path.join(tmpdir, f"output-{i}.json")
+                files.append(in_file)
+                files.append(out_file)
+                with open(in_file, "w", encoding="utf-8") as f:
+                    input.to_json(f)
+                with open(out_file, "w", encoding="utf-8") as f:
+                    output.to_json(f)
+
+            msg += " and attach the following files:\n - " + "\n - ".join(files)
+        except Exception:
+            msg += "."
+        tty.error(msg, stream=out)
+
+
 class SpackCommandError(Exception):
     """Raised when SpackCommand execution fails."""
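So `spack --no-pager find` bypasses the pager entirely. What the flag does internally, per the hunk above; the double colon in "config::pager" makes the command-line scope override rather than merge with lower-precedence scopes:

    import spack.config

    spack.config.set("config::pager", [], scope="command_line")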
@@ -162,6 +162,7 @@ class tty:
     configure: Executable
     make_jobs: int
     make: MakeExecutable
+    nmake: Executable
     ninja: MakeExecutable
     python_include: str
     python_platlib: str
@@ -14,7 +14,6 @@
 import functools
 import glob
 import hashlib
-import importlib
 import io
 import os
 import re
@@ -28,7 +27,7 @@

 import llnl.util.filesystem as fsys
 import llnl.util.tty as tty
-from llnl.util.lang import classproperty, memoized
+from llnl.util.lang import ClassProperty, classproperty, memoized

 import spack.config
 import spack.dependency
@@ -48,6 +47,7 @@
 import spack.url
 import spack.util.environment
 import spack.util.executable
+import spack.util.naming
 import spack.util.path
 import spack.util.web
 import spack.variant
@@ -701,10 +701,10 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
     _verbose = None

     #: Package homepage where users can find more information about the package
-    homepage: Optional[str] = None
+    homepage: ClassProperty[Optional[str]] = None

     #: Default list URL (place to find available versions)
-    list_url: Optional[str] = None
+    list_url: ClassProperty[Optional[str]] = None

     #: Link depth to which list_url should be searched for new versions
     list_depth = 0
@@ -818,12 +818,12 @@ def package_dir(cls):

     @classproperty
     def module(cls):
-        """Module object (not just the name) that this package is defined in.
+        """Module instance that this package class is defined in.

         We use this to add variables to package modules. This makes
         install() methods easier to write (e.g., can call configure())
         """
-        return importlib.import_module(cls.__module__)
+        return sys.modules[cls.__module__]

     @classproperty
     def namespace(cls):
@@ -839,26 +839,36 @@ def fullname(cls):
     def fullnames(cls):
         """Fullnames for this package and any packages from which it inherits."""
         fullnames = []
-        for cls in cls.__mro__:
-            namespace = getattr(cls, "namespace", None)
-            if namespace:
-                fullnames.append("%s.%s" % (namespace, cls.name))
-            if namespace == "builtin":
-                # builtin packages cannot inherit from other repos
+        for base in cls.__mro__:
+            if not spack.repo.is_package_module(base.__module__):
                 break
+            fullnames.append(base.fullname)
         return fullnames

     @classproperty
     def name(cls):
-        """The name of this package.
-
-        The name of a package is the name of its Python module, without
-        the containing module names.
-        """
+        """The name of this package."""
         if cls._name is None:
-            cls._name = cls.module.__name__
-            if "." in cls._name:
-                cls._name = cls._name[cls._name.rindex(".") + 1 :]
+            # We cannot know the exact package API version, but we can distinguish between v1
+            # v2 based on the module. We don't want to figure out the exact package API version
+            # since it requires parsing the repo.yaml.
+            module = cls.__module__
+
+            if module.startswith(spack.repo.PKG_MODULE_PREFIX_V1):
+                version = (1, 0)
+            elif module.startswith(spack.repo.PKG_MODULE_PREFIX_V2):
+                version = (2, 0)
+            else:
+                raise ValueError(f"Package {cls.__qualname__} is not a known Spack package")
+
+            if version < (2, 0):
+                # spack.pkg.builtin.package_name.
+                _, _, pkg_module = module.rpartition(".")
+            else:
+                # spack_repo.builtin.packages.package_name.package
+                pkg_module = module.rsplit(".", 2)[-2]
+
+            cls._name = spack.util.naming.pkg_dir_to_pkg_name(pkg_module, version)
        return cls._name

     @classproperty
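A worked example of the new `name` derivation for the two module layouts (module names here are illustrative):

    v1 = "spack.pkg.builtin.hdf5"
    _, _, pkg_module_v1 = v1.rpartition(".")      # "hdf5"
    v2 = "spack_repo.builtin.packages.hdf5.package"
    pkg_module_v2 = v2.rsplit(".", 2)[-2]         # "hdf5"
    # pkg_dir_to_pkg_name then maps the directory name to the package name
    # for the detected API version in both cases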
@@ -56,8 +56,9 @@

 # read-only things in $spack/var/spack
 repos_path = os.path.join(var_path, "repos")
-packages_path = os.path.join(repos_path, "builtin")
-mock_packages_path = os.path.join(repos_path, "builtin.mock")
+test_repos_path = os.path.join(var_path, "test_repos")
+packages_path = os.path.join(repos_path, "spack_repo", "builtin")
+mock_packages_path = os.path.join(test_repos_path, "builtin.mock")

 #
 # Writable things in $spack/var/spack
@@ -47,40 +47,34 @@
|
||||
import spack.util.path
|
||||
import spack.util.spack_yaml as syaml
|
||||
|
||||
#: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name>
|
||||
ROOT_PYTHON_NAMESPACE = "spack.pkg"
|
||||
PKG_MODULE_PREFIX_V1 = "spack.pkg."
|
||||
PKG_MODULE_PREFIX_V2 = "spack_repo."
|
||||
|
||||
_API_REGEX = re.compile(r"^v(\d+)\.(\d+)$")
|
||||
|
||||
|
||||
def python_package_for_repo(namespace):
|
||||
"""Returns the full namespace of a repository, given its relative one
|
||||
|
||||
For instance:
|
||||
|
||||
python_package_for_repo('builtin') == 'spack.pkg.builtin'
|
||||
|
||||
Args:
|
||||
namespace (str): repo namespace
|
||||
"""
|
||||
return "{0}.{1}".format(ROOT_PYTHON_NAMESPACE, namespace)
|
||||
def is_package_module(fullname: str) -> bool:
|
||||
"""Check if the given module is a package module."""
|
||||
return fullname.startswith(PKG_MODULE_PREFIX_V1) or fullname.startswith(PKG_MODULE_PREFIX_V2)
|
||||
|
||||
|
||||
def namespace_from_fullname(fullname):
|
||||
def namespace_from_fullname(fullname: str) -> str:
|
||||
"""Return the repository namespace only for the full module name.
|
||||
|
||||
For instance:
|
||||
|
||||
namespace_from_fullname('spack.pkg.builtin.hdf5') == 'builtin'
|
||||
namespace_from_fullname("spack.pkg.builtin.hdf5") == "builtin"
|
||||
namespace_from_fullname("spack_repo.x.y.z.packages.pkg_name.package") == "x.y.z"
|
||||
|
||||
Args:
|
||||
fullname (str): full name for the Python module
|
||||
fullname: full name for the Python module
|
||||
"""
|
||||
namespace, dot, module = fullname.rpartition(".")
|
||||
prefix_and_dot = "{0}.".format(ROOT_PYTHON_NAMESPACE)
|
||||
if namespace.startswith(prefix_and_dot):
|
||||
namespace = namespace[len(prefix_and_dot) :]
|
||||
return namespace
|
||||
if fullname.startswith(PKG_MODULE_PREFIX_V1):
|
||||
namespace, _, _ = fullname.rpartition(".")
|
||||
return namespace[len(PKG_MODULE_PREFIX_V1) :]
|
||||
elif fullname.startswith(PKG_MODULE_PREFIX_V2) and fullname.endswith(".package"):
|
||||
return ".".join(fullname.split(".")[1:-3])
|
||||
return fullname
|
||||
|
||||
|
||||
class SpackNamespaceLoader:
|
||||
@@ -92,14 +86,14 @@ def exec_module(self, module):
|
||||
|
||||
|
||||
class ReposFinder:
|
||||
"""MetaPathFinder class that loads a Python module corresponding to a Spack package.
|
||||
"""MetaPathFinder class that loads a Python module corresponding to an API v1 Spack package.
|
||||
|
||||
Returns a loader based on the inspection of the current repository list.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self._repo_init = _path
|
||||
self._repo = None
|
||||
self._repo: Optional[RepoType] = None
|
||||
|
||||
@property
|
||||
def current_repository(self):
|
||||
@@ -127,7 +121,7 @@ def find_spec(self, fullname, python_path, target=None):
|
||||
raise RuntimeError('cannot reload module "{0}"'.format(fullname))
|
||||
|
||||
# Preferred API from https://peps.python.org/pep-0451/
|
||||
if not fullname.startswith(ROOT_PYTHON_NAMESPACE):
|
||||
if not fullname.startswith(PKG_MODULE_PREFIX_V1) and fullname != "spack.pkg":
|
||||
return None
|
||||
|
||||
loader = self.compute_loader(fullname)
|
||||
@@ -135,16 +129,17 @@ def find_spec(self, fullname, python_path, target=None):
|
||||
return None
|
||||
return importlib.util.spec_from_loader(fullname, loader)
|
||||
|
||||
def compute_loader(self, fullname):
|
||||
def compute_loader(self, fullname: str):
|
||||
# namespaces are added to repo, and package modules are leaves.
|
||||
namespace, dot, module_name = fullname.rpartition(".")
|
||||
|
||||
# If it's a module in some repo, or if it is the repo's namespace, let the repo handle it.
|
||||
is_repo_path = isinstance(self.current_repository, RepoPath)
|
||||
current_repo = self.current_repository
|
||||
is_repo_path = isinstance(current_repo, RepoPath)
|
||||
if is_repo_path:
|
||||
repos = self.current_repository.repos
|
||||
repos = current_repo.repos
|
||||
else:
|
||||
repos = [self.current_repository]
|
||||
repos = [current_repo]
|
||||
|
||||
for repo in repos:
|
||||
# We are using the namespace of the repo and the repo contains the package
|
||||
@@ -161,7 +156,9 @@ def compute_loader(self, fullname):
|
||||
|
||||
# No repo provides the namespace, but it is a valid prefix of
|
||||
# something in the RepoPath.
|
||||
if is_repo_path and self.current_repository.by_namespace.is_prefix(fullname):
|
||||
if is_repo_path and current_repo.by_namespace.is_prefix(
|
||||
fullname[len(PKG_MODULE_PREFIX_V1) :]
|
||||
):
|
||||
return SpackNamespaceLoader()
|
||||
|
||||
return None
|
||||
@@ -179,12 +176,12 @@ def compute_loader(self, fullname):
|
||||
NOT_PROVIDED = object()
|
||||
|
||||
|
||||
def packages_path():
|
||||
def builtin_repo() -> "Repo":
|
||||
"""Get the test repo if it is active, otherwise the builtin repo."""
|
||||
try:
|
||||
return PATH.get_repo("builtin.mock").packages_path
|
||||
return PATH.get_repo("builtin.mock")
|
||||
except UnknownNamespaceError:
|
||||
return PATH.get_repo("builtin").packages_path
|
||||
return PATH.get_repo("builtin")
|
||||
|
||||
|
||||
class GitExe:
|
||||
@@ -192,24 +189,25 @@ class GitExe:
|
||||
# invocations.
|
||||
#
|
||||
# Not using -C as that is not supported for git < 1.8.5.
|
||||
def __init__(self):
|
||||
def __init__(self, packages_path: str):
|
||||
self._git_cmd = spack.util.git.git(required=True)
|
||||
self.packages_dir = packages_path
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
with working_dir(packages_path()):
|
||||
return self._git_cmd(*args, **kwargs)
|
||||
def __call__(self, *args, **kwargs) -> str:
|
||||
with working_dir(self.packages_dir):
|
||||
return self._git_cmd(*args, **kwargs, output=str)
|
||||
|
||||
|
||||
def list_packages(rev):
def list_packages(rev: str, repo: "Repo") -> List[str]:
"""List all packages associated with the given revision"""
git = GitExe()
git = GitExe(repo.packages_path)

# git ls-tree does not support ... merge-base syntax, so do it manually
if rev.endswith("..."):
ref = rev.replace("...", "")
rev = git("merge-base", ref, "HEAD", output=str).strip()
rev = git("merge-base", ref, "HEAD").strip()

output = git("ls-tree", "-r", "--name-only", rev, output=str)
output = git("ls-tree", "-r", "--name-only", rev)

# recursively list the packages directory
package_paths = [
@@ -217,54 +215,54 @@ def list_packages(rev):
]

# take the directory names with one-level-deep package files
package_names = sorted(set([line[0] for line in package_paths if len(line) == 2]))
package_names = [
nm.pkg_dir_to_pkg_name(line[0], repo.package_api)
for line in package_paths
if len(line) == 2
]

return package_names
return sorted(set(package_names))

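Directory names are now routed through nm.pkg_dir_to_pkg_name so that v2 directory names map back to package names. A rough standalone sketch of that direction of the mapping (the exact rules live in spack.util.naming; the underscore/hyphen and leading-underscore handling here is an assumption for illustration only):

    def pkg_dir_to_pkg_name_sketch(dir_name: str) -> str:
        # v2 package dirs are Python module names: hyphens stored as
        # underscores, and a leading underscore escapes names that would
        # otherwise start with a digit (assumed convention).
        name = dir_name[1:] if dir_name.startswith("_") else dir_name
        return name.replace("_", "-")

    assert pkg_dir_to_pkg_name_sketch("zlib_ng") == "zlib-ng"
    assert pkg_dir_to_pkg_name_sketch("_3proxy") == "3proxy"
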
def diff_packages(rev1, rev2):
def diff_packages(rev1: str, rev2: str, repo: "Repo") -> Tuple[Set[str], Set[str]]:
"""Compute packages lists for the two revisions and return a tuple
containing all the packages in rev1 but not in rev2 and all the
packages in rev2 but not in rev1."""
p1 = set(list_packages(rev1))
p2 = set(list_packages(rev2))
p1 = set(list_packages(rev1, repo))
p2 = set(list_packages(rev2, repo))
return p1.difference(p2), p2.difference(p1)


def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):
"""Show packages changed, added, or removed (or any combination of those)
since a commit.
def get_all_package_diffs(type: str, repo: "Repo", rev1="HEAD^1", rev2="HEAD") -> Set[str]:
"""Get packages changed, added, or removed (or any combination of those) since a commit.

Arguments:

type (str): String containing one or more of 'A', 'R', 'C'
rev1 (str): Revision to compare against, default is 'HEAD^'
rev2 (str): Revision to compare to rev1, default is 'HEAD'

Returns:

A set contain names of affected packages.
type: String containing one or more of 'A', 'R', 'C'
rev1: Revision to compare against, default is 'HEAD^'
rev2: Revision to compare to rev1, default is 'HEAD'
"""
lower_type = type.lower()
if not re.match("^[arc]*$", lower_type):
tty.die(
"Invald change type: '%s'." % type,
"Can contain only A (added), R (removed), or C (changed)",
f"Invalid change type: '{type}'. "
"Can contain only A (added), R (removed), or C (changed)"
)

removed, added = diff_packages(rev1, rev2)
removed, added = diff_packages(rev1, rev2, repo)

git = GitExe()
out = git("diff", "--relative", "--name-only", rev1, rev2, output=str).strip()
git = GitExe(repo.packages_path)
out = git("diff", "--relative", "--name-only", rev1, rev2).strip()

lines = [] if not out else re.split(r"\s+", out)
changed = set()
changed: Set[str] = set()
for path in lines:
pkg_name, _, _ = path.partition("/")
dir_name, _, _ = path.partition("/")
pkg_name = nm.pkg_dir_to_pkg_name(dir_name, repo.package_api)
if pkg_name not in added and pkg_name not in removed:
changed.add(pkg_name)

packages = set()
packages: Set[str] = set()
if "a" in lower_type:
packages |= added
if "r" in lower_type:
@@ -275,14 +273,14 @@ def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):
return packages


def add_package_to_git_stage(packages):
def add_package_to_git_stage(packages: List[str], repo: "Repo") -> None:
"""add a package to the git stage with `git add`"""
git = GitExe()
git = GitExe(repo.packages_path)

for pkg_name in packages:
filename = PATH.filename_for_package_name(pkg_name)
if not os.path.isfile(filename):
tty.die("No such package: %s. Path does not exist:" % pkg_name, filename)
tty.die(f"No such package: {pkg_name}. Path does not exist:", filename)

git("add", filename)

@@ -352,9 +350,10 @@ class FastPackageChecker(collections.abc.Mapping):
#: Global cache, reused by every instance
_paths_cache: Dict[str, Dict[str, os.stat_result]] = {}

def __init__(self, packages_path):
def __init__(self, packages_path: str, package_api: Tuple[int, int]):
# The path of the repository managed by this instance
self.packages_path = packages_path
self.package_api = package_api

# If the cache we need is not there yet, then build it appropriately
if packages_path not in self._paths_cache:
@@ -379,41 +378,38 @@ def _create_new_cache(self) -> Dict[str, os.stat_result]:
# Create a dictionary that will store the mapping between a
# package name and its stat info
cache: Dict[str, os.stat_result] = {}
for pkg_name in os.listdir(self.packages_path):
# Skip non-directories in the package root.
pkg_dir = os.path.join(self.packages_path, pkg_name)
with os.scandir(self.packages_path) as entries:
for entry in entries:
# Construct the file name from the directory
pkg_file = os.path.join(entry.path, package_file_name)

# Warn about invalid names that look like packages.
if not nm.valid_module_name(pkg_name):
if not pkg_name.startswith(".") and pkg_name != "repo.yaml":
try:
sinfo = os.stat(pkg_file)
except OSError as e:
if e.errno in (errno.ENOENT, errno.ENOTDIR):
# No package.py file here.
continue
elif e.errno == errno.EACCES:
tty.warn(f"Can't read package file {pkg_file}.")
continue
raise e

# If it's not a file, skip it.
if not stat.S_ISREG(sinfo.st_mode):
continue

# Only consider package.py files in directories that are valid module names under
# the current package API
if not nm.valid_module_name(entry.name, self.package_api):
x, y = self.package_api
tty.warn(
'Skipping package at {0}. "{1}" is not '
"a valid Spack module name.".format(pkg_dir, pkg_name)
f"Package {pkg_file} cannot be used because `{entry.name}` is not a valid "
f"Spack package module name for Package API v{x}.{y}."
)
continue

# Construct the file name from the directory
pkg_file = os.path.join(self.packages_path, pkg_name, package_file_name)

# Use stat here to avoid lots of calls to the filesystem.
try:
sinfo = os.stat(pkg_file)
except OSError as e:
if e.errno == errno.ENOENT:
# No package.py file here.
continue
elif e.errno == errno.EACCES:
tty.warn("Can't read package file %s." % pkg_file)
continue
raise e

# If it's not a file, skip it.
if stat.S_ISDIR(sinfo.st_mode):
continue

# If it is a file, then save the stats under the
# appropriate key
cache[pkg_name] = sinfo
# Store the stat info by package name.
cache[nm.pkg_dir_to_pkg_name(entry.name, self.package_api)] = sinfo

return cache

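The rewritten loop swaps os.listdir() plus per-entry path joins for a single os.scandir() pass with one stat() per candidate package.py. A self-contained sketch of the same caching pattern (file name and warning policy simplified):

    import errno
    import os
    import stat
    from typing import Dict

    def scan_package_stats(packages_path: str) -> Dict[str, os.stat_result]:
        cache: Dict[str, os.stat_result] = {}
        with os.scandir(packages_path) as entries:
            for entry in entries:
                pkg_file = os.path.join(entry.path, "package.py")
                try:
                    sinfo = os.stat(pkg_file)
                except OSError as e:
                    if e.errno in (errno.ENOENT, errno.ENOTDIR):
                        continue  # no package.py in this entry
                    raise
                if stat.S_ISREG(sinfo.st_mode):  # skip directories and oddities
                    cache[entry.name] = sinfo
        return cache
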
@@ -688,7 +684,7 @@ def put_first(self, repo: "Repo") -> None:
return

self.repos.insert(0, repo)
self.by_namespace[repo.full_namespace] = repo
self.by_namespace[repo.namespace] = repo

def put_last(self, repo):
"""Add repo last in the search path."""
@@ -700,8 +696,8 @@ def put_last(self, repo):
self.repos.append(repo)

# don't mask any higher-precedence repos with same namespace
if repo.full_namespace not in self.by_namespace:
self.by_namespace[repo.full_namespace] = repo
if repo.namespace not in self.by_namespace:
self.by_namespace[repo.namespace] = repo

def remove(self, repo):
"""Remove a repo from the search path."""
@@ -710,10 +706,9 @@ def remove(self, repo):

def get_repo(self, namespace: str) -> "Repo":
"""Get a repository by namespace."""
full_namespace = python_package_for_repo(namespace)
if full_namespace not in self.by_namespace:
if namespace not in self.by_namespace:
raise UnknownNamespaceError(namespace)
return self.by_namespace[full_namespace]
return self.by_namespace[namespace]

def first_repo(self) -> Optional["Repo"]:
"""Get the first repo in precedence order."""
@@ -821,10 +816,9 @@ def repo_for_pkg(self, spec: Union[str, "spack.spec.Spec"]) -> "Repo":
# If the spec already has a namespace, then return the
# corresponding repo if we know about it.
if namespace:
fullspace = python_package_for_repo(namespace)
if fullspace not in self.by_namespace:
if namespace not in self.by_namespace:
raise UnknownNamespaceError(namespace, name=name)
return self.by_namespace[fullspace]
return self.by_namespace[namespace]

# If there's no namespace, search in the RepoPath.
for repo in self.repos:
@@ -845,8 +839,15 @@ def get(self, spec: "spack.spec.Spec") -> "spack.package_base.PackageBase":
assert isinstance(spec, spack.spec.Spec) and spec.concrete, msg
return self.repo_for_pkg(spec).get(spec)

def python_paths(self) -> List[str]:
"""Return a list of all the Python paths in the repos."""
return [repo.python_path for repo in self.repos if repo.python_path]

def get_pkg_class(self, pkg_name: str) -> Type["spack.package_base.PackageBase"]:
"""Find a class for the spec's package and return the class object."""
for p in self.python_paths():
if p not in sys.path:
sys.path.insert(0, p)
return self.repo_for_pkg(pkg_name).get_pkg_class(pkg_name)

@autospec
@@ -942,6 +943,30 @@ def _parse_package_api_version(
)


def _validate_and_normalize_subdir(subdir: Any, root: str, package_api: Tuple[int, int]) -> str:
if not isinstance(subdir, str):
raise BadRepoError(f"Invalid subdirectory '{subdir}' in '{root}'. Must be a string")

if package_api < (2, 0):
return subdir  # In v1.x we did not validate subdir names

if subdir in (".", ""):
raise BadRepoError(
f"Invalid subdirectory '{subdir}' in '{root}'. Use a symlink packages -> . instead"
)

# Otherwise we expect a directory name (not path) that can be used as a Python module.
if os.sep in subdir:
raise BadRepoError(
f"Invalid subdirectory '{subdir}' in '{root}'. Expected a directory name, not a path"
)
if not nm.valid_module_name(subdir, package_api):
raise BadRepoError(
f"Invalid subdirectory '{subdir}' in '{root}'. Must be a valid Python module name"
)
return subdir


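Condensed, the v2 rules above accept a single directory component that is also a legal module name. A small approximation, where str.isidentifier() stands in for nm.valid_module_name (an assumption made for this sketch):

    import os

    def looks_like_valid_v2_subdir(subdir: str) -> bool:
        return (
            subdir not in (".", "")      # use a packages -> . symlink instead
            and os.sep not in subdir     # a directory name, not a path
            and subdir.isidentifier()    # importable as a Python module
        )

    assert looks_like_valid_v2_subdir("packages")
    assert not looks_like_valid_v2_subdir("nested/dir")
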
class Repo:
"""Class representing a package repository in the filesystem.

@@ -962,6 +987,8 @@ class Repo:
:py:data:`spack.package_api_version`.
"""

namespace: str

def __init__(
self,
root: str,
@@ -991,32 +1018,79 @@ def check(condition, msg):

# Read configuration and validate namespace
config = self._read_config()

self.package_api = _parse_package_api_version(config)
self.subdirectory = _validate_and_normalize_subdir(
config.get("subdirectory", packages_dir_name), root, self.package_api
)
self.packages_path = os.path.join(self.root, self.subdirectory)

check(
"namespace" in config,
f"{os.path.join(root, repo_config_name)} must define a namespace.",
os.path.isdir(self.packages_path),
f"No directory '{self.subdirectory}' found in '{root}'",
)

self.namespace: str = config["namespace"]
check(
re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "
"Namespaces must be valid python identifiers separated by '.'",
)
# The parent dir of spack_repo/ which should be added to sys.path for api v2.x
self.python_path: Optional[str] = None

if self.package_api < (2, 0):
check(
"namespace" in config,
f"{os.path.join(root, repo_config_name)} must define a namespace.",
)
self.namespace = config["namespace"]
# Note: for Package API v1.x the namespace validation always had bugs, which won't be
# fixed for compatibility reasons. The regex is missing "$" at the end, and it claims
# to test for valid identifiers, but fails to split on `.` first.
check(
isinstance(self.namespace, str)
and re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "
"Namespaces must be valid python identifiers separated by '.'",
)
else:
# From Package API v2.0 the namespace follows from the directory structure.
check(
f"{os.sep}spack_repo{os.sep}" in self.root,
f"Invalid repository path '{self.root}'. "
f"Path must contain 'spack_repo{os.sep}'",
)
derived_namespace = self.root.rpartition(f"spack_repo{os.sep}")[2].replace(os.sep, ".")
if "namespace" in config:
self.namespace = config["namespace"]

check(
isinstance(self.namespace, str) and self.namespace == derived_namespace,
f"Namespace '{self.namespace}' should be {derived_namespace} or omitted in "
f"{os.path.join(root, repo_config_name)}",
)
else:
self.namespace = derived_namespace

# strip the namespace directories from the root path to get the python path
# e.g. /my/pythonpath/spack_repo/x/y/z -> /my/pythonpath
python_path = self.root
for _ in self.namespace.split("."):
python_path = os.path.dirname(python_path)
self.python_path = os.path.dirname(python_path)

# check that all subdirectories are valid module names
check(
all(nm.valid_module_name(x, self.package_api) for x in self.namespace.split(".")),
f"Invalid namespace '{self.namespace}' in repo '{self.root}'",
)

# Set up 'full_namespace' to include the super-namespace
self.full_namespace = python_package_for_repo(self.namespace)
if self.package_api < (2, 0):
self.full_namespace = f"{PKG_MODULE_PREFIX_V1}{self.namespace}"
elif self.subdirectory == ".":
self.full_namespace = f"{PKG_MODULE_PREFIX_V2}{self.namespace}"
else:
self.full_namespace = f"{PKG_MODULE_PREFIX_V2}{self.namespace}.{self.subdirectory}"

# Keep name components around for checking prefixes.
self._names = self.full_namespace.split(".")

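Putting the v2 branch above together: the namespace is read off the directory structure under spack_repo/, and python_path is whatever precedes it. A worked sketch with made-up POSIX paths:

    import os

    root = "/py/spack_repo/acme/hpc"   # hypothetical repo root
    derived = root.rpartition(f"spack_repo{os.sep}")[2].replace(os.sep, ".")
    assert derived == "acme.hpc"

    # One dirname() per namespace component, then one more to drop spack_repo/
    python_path = root
    for _ in derived.split("."):
        python_path = os.path.dirname(python_path)
    assert os.path.dirname(python_path) == "/py"
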
packages_dir: str = config.get("subdirectory", packages_dir_name)
self.packages_path = os.path.join(self.root, packages_dir)
check(
os.path.isdir(self.packages_path), f"No directory '{packages_dir}' found in '{root}'"
)

self.package_api = _parse_package_api_version(config)

# Class attribute overrides by package name
self.overrides = overrides or {}

@@ -1030,27 +1104,36 @@ def check(condition, msg):
self._repo_index: Optional[RepoIndex] = None
self._cache = cache

@property
def package_api_str(self) -> str:
return f"v{self.package_api[0]}.{self.package_api[1]}"

def finder(self, value: RepoPath) -> None:
self._finder = value

def real_name(self, import_name: str) -> Optional[str]:
"""Allow users to import Spack packages using Python identifiers.

A python identifier might map to many different Spack package
names due to hyphen/underscore ambiguity.
In Package API v1.x, there was no canonical module name for a package, and a package's directory
was not necessarily a valid Python module name. For that case we have to guess the actual
package directory. From Package API v2.0 there is a one-to-one mapping between Spack
package names and Python module names, so there is no guessing.

Easy example:
num3proxy -> 3proxy

Ambiguous:
For Package API v1.x we support the following one-to-many mappings:
num3proxy -> 3proxy
foo_bar -> foo_bar, foo-bar

More ambiguous:
foo_bar_baz -> foo_bar_baz, foo-bar-baz, foo_bar-baz, foo-bar_baz
"""
if self.package_api >= (2, 0):
if nm.pkg_dir_to_pkg_name(import_name, package_api=self.package_api) in self:
return import_name
return None

if import_name in self:
return import_name

# For v1 generate the possible package names from a module name, and return the first
# package name that exists in this repo.
options = nm.possible_spack_module_names(import_name)
try:
options.remove(import_name)
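The v1 guessing preserved by this branch comes from the hyphen/underscore ambiguity the docstring describes: every underscore in the module name may originally have been a hyphen. A standalone sketch of how the candidate set grows (this is an illustration, not the spack.util.naming implementation):

    from itertools import product

    def possible_spack_names(module_name: str) -> list:
        parts = module_name.split("_")
        combos = product("_-", repeat=len(parts) - 1)
        return ["".join(p + s for p, s in zip(parts, (*seps, ""))) for seps in combos]

    assert set(possible_spack_names("foo_bar_baz")) == {
        "foo_bar_baz", "foo_bar-baz", "foo-bar_baz", "foo-bar-baz"
    }
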
@@ -1183,7 +1266,9 @@ def extensions_for(
def dirname_for_package_name(self, pkg_name: str) -> str:
"""Given a package name, get the directory containing its package.py file."""
_, unqualified_name = self.partition_package_name(pkg_name)
return os.path.join(self.packages_path, unqualified_name)
return os.path.join(
self.packages_path, nm.pkg_name_to_pkg_dir(unqualified_name, self.package_api)
)

def filename_for_package_name(self, pkg_name: str) -> str:
"""Get the filename for the module we should load for a particular
@@ -1200,7 +1285,7 @@ def filename_for_package_name(self, pkg_name: str) -> str:
@property
def _pkg_checker(self) -> FastPackageChecker:
if self._fast_package_checker is None:
self._fast_package_checker = FastPackageChecker(self.packages_path)
self._fast_package_checker = FastPackageChecker(self.packages_path, self.package_api)
return self._fast_package_checker

def all_package_names(self, include_virtuals: bool = False) -> List[str]:
@@ -1212,7 +1297,9 @@ def all_package_names(self, include_virtuals: bool = False) -> List[str]:

def package_path(self, name: str) -> str:
"""Get path to package.py file for this repo."""
return os.path.join(self.packages_path, name, package_file_name)
return os.path.join(
self.packages_path, nm.pkg_name_to_pkg_dir(name, self.package_api), package_file_name
)

def all_package_paths(self) -> Generator[str, None, None]:
for name in self.all_package_names():
@@ -1270,15 +1357,19 @@ def get_pkg_class(self, pkg_name: str) -> Type["spack.package_base.PackageBase"]
package. Then extracts the package class from the module
according to Spack's naming convention.
"""
namespace, pkg_name = self.partition_package_name(pkg_name)
class_name = nm.mod_to_class(pkg_name)
fullname = f"{self.full_namespace}.{pkg_name}"
_, pkg_name = self.partition_package_name(pkg_name)
fullname = f"{self.full_namespace}.{nm.pkg_name_to_pkg_dir(pkg_name, self.package_api)}"
if self.package_api >= (2, 0):
fullname += ".package"

class_name = nm.pkg_name_to_class_name(pkg_name)
if self.python_path and self.python_path not in sys.path:
sys.path.insert(0, self.python_path)
try:
with REPOS_FINDER.switch_repo(self._finder or self):
module = importlib.import_module(fullname)
except ImportError:
raise UnknownPackageError(fullname)
except ImportError as e:
raise UnknownPackageError(fullname) from e
except Exception as e:
msg = f"cannot load package '{pkg_name}' from the '{self.namespace}' repository: {e}"
raise RepoError(msg) from e
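For Package API v2 the imported module is the package submodule inside the package's directory, so the composed name looks like the sketch below (namespace and package are hypothetical; the hyphen-to-underscore step stands in for nm.pkg_name_to_pkg_dir and is an assumption):

    full_namespace = "spack_repo.acme.hpc.packages"  # v2 prefix + namespace + subdir
    pkg_name = "zlib-ng"
    fullname = f"{full_namespace}.{pkg_name.replace('-', '_')}" + ".package"
    assert fullname == "spack_repo.acme.hpc.packages.zlib_ng.package"
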
@@ -1369,46 +1460,71 @@ def partition_package_name(pkg_name: str) -> Tuple[str, str]:
return namespace, pkg_name


def create_repo(root, namespace=None, subdir=packages_dir_name):
def get_repo_yaml_dir(
root: str, namespace: Optional[str], package_api: Tuple[int, int]
) -> Tuple[str, str]:
"""Returns the directory where repo.yaml is located and the effective namespace."""
if package_api < (2, 0):
namespace = namespace or os.path.basename(root)
# This ad-hoc regex is kept for historical reasons; changing it would be a breaking change.
if not re.match(r"\w[\.\w-]*", namespace):
raise InvalidNamespaceError(f"'{namespace}' is not a valid namespace.")
return root, namespace

# Package API v2 has <root>/spack_repo/<namespace>/<subdir> structure and requires a namespace
if namespace is None:
raise InvalidNamespaceError("Namespace must be provided.")

# if namespace has dots those translate to subdirs of further namespace packages.
namespace_components = namespace.split(".")

if not all(nm.valid_module_name(n, package_api=package_api) for n in namespace_components):
raise InvalidNamespaceError(f"'{namespace}' is not a valid namespace.")

return os.path.join(root, "spack_repo", *namespace_components), namespace


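So for a dotted v2 namespace, every component becomes a directory under spack_repo/ (illustrative POSIX values):

    import os

    root, namespace = "/repos", "acme.hpc"
    repo_yaml_dir = os.path.join(root, "spack_repo", *namespace.split("."))
    assert repo_yaml_dir == "/repos/spack_repo/acme/hpc"
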
def create_repo(
root,
namespace: Optional[str] = None,
subdir: str = packages_dir_name,
package_api: Tuple[int, int] = spack.package_api_version,
) -> Tuple[str, str]:
"""Create a new repository in root with the specified namespace.

If the namespace is not provided, use basename of root.
Return the canonicalized path and namespace of the created repository.
"""
root = spack.util.path.canonicalize_path(root)
if not namespace:
namespace = os.path.basename(root)
repo_yaml_dir, namespace = get_repo_yaml_dir(os.path.abspath(root), namespace, package_api)

if not re.match(r"\w[\.\w-]*", namespace):
raise InvalidNamespaceError("'%s' is not a valid namespace." % namespace)
existed = True
try:
dir_entry = next(os.scandir(repo_yaml_dir), None)
except OSError as e:
if e.errno == errno.ENOENT:
existed = False
dir_entry = None
else:
raise BadRepoError(f"Cannot create new repo in {root}: {e}")

existed = False
if os.path.exists(root):
if os.path.isfile(root):
raise BadRepoError("File %s already exists and is not a directory" % root)
elif os.path.isdir(root):
if not os.access(root, os.R_OK | os.W_OK):
raise BadRepoError("Cannot create new repo in %s: cannot access directory." % root)
if os.listdir(root):
raise BadRepoError("Cannot create new repo in %s: directory is not empty." % root)
existed = True
if dir_entry is not None:
raise BadRepoError(f"Cannot create new repo in {root}: directory is not empty.")

full_path = os.path.realpath(root)
parent = os.path.dirname(full_path)
if not os.access(parent, os.R_OK | os.W_OK):
raise BadRepoError("Cannot create repository in %s: can't access parent!" % root)
config_path = os.path.join(repo_yaml_dir, repo_config_name)

subdir = _validate_and_normalize_subdir(subdir, root, package_api)

packages_path = os.path.join(repo_yaml_dir, subdir)

try:
config_path = os.path.join(root, repo_config_name)
packages_path = os.path.join(root, subdir)

fs.mkdirp(packages_path)
with open(config_path, "w", encoding="utf-8") as config:
config.write("repo:\n")
config.write(f"  namespace: '{namespace}'\n")
if subdir != packages_dir_name:
config.write(f"  subdirectory: '{subdir}'\n")
x, y = spack.package_api_version
x, y = package_api
config.write(f"  api: v{x}.{y}\n")

except OSError as e:
@@ -1421,22 +1537,27 @@ def create_repo(root, namespace=None, subdir=packages_dir_name):

raise BadRepoError(
"Failed to create new repository in %s." % root, "Caused by %s: %s" % (type(e), e)
)
) from e

return full_path, namespace
return repo_yaml_dir, namespace


def from_path(path: str) -> "Repo":
def from_path(path: str) -> Repo:
"""Returns a repository from the path passed as input. Injects the global misc cache."""
return Repo(path, cache=spack.caches.MISC_CACHE)


def create_or_construct(path, namespace=None):
def create_or_construct(
root: str,
namespace: Optional[str] = None,
package_api: Tuple[int, int] = spack.package_api_version,
) -> Repo:
"""Create a repository, or just return a Repo if it already exists."""
if not os.path.exists(path):
fs.mkdirp(path)
create_repo(path, namespace)
return from_path(path)
repo_yaml_dir, _ = get_repo_yaml_dir(root, namespace, package_api)
if not os.path.exists(repo_yaml_dir):
fs.mkdirp(root)
create_repo(root, namespace=namespace, package_api=package_api)
return from_path(repo_yaml_dir)


def _path(configuration=None):
@@ -1514,8 +1635,10 @@ class MockRepositoryBuilder:
"""Build a mock repository in a directory"""

def __init__(self, root_directory, namespace=None):
namespace = namespace or "".join(random.choice(string.ascii_uppercase) for _ in range(10))
self.root, self.namespace = create_repo(str(root_directory), namespace)
namespace = namespace or "".join(random.choice(string.ascii_lowercase) for _ in range(10))
repo_root = os.path.join(root_directory, namespace)
os.mkdir(repo_root)
self.root, self.namespace = create_repo(repo_root, namespace)

def add_package(self, name, dependencies=None):
"""Create a mock package in the repository, using a Jinja2 template.
@@ -1527,7 +1650,7 @@ def add_package(self, name, dependencies=None):
``spack.dependency.default_deptype`` and ``spack.spec.Spec()`` are used.
"""
dependencies = dependencies or []
context = {"cls_name": nm.mod_to_class(name), "dependencies": dependencies}
context = {"cls_name": nm.pkg_name_to_class_name(name), "dependencies": dependencies}
template = spack.tengine.make_environment().get_template("mock-repository/package.pyt")
text = template.render(context)
package_py = self.recipe_filename(name)
@@ -1539,8 +1662,10 @@ def remove(self, name):
package_py = self.recipe_filename(name)
shutil.rmtree(os.path.dirname(package_py))

def recipe_filename(self, name):
return os.path.join(self.root, "packages", name, "package.py")
def recipe_filename(self, name: str):
return os.path.join(
self.root, "packages", nm.pkg_name_to_pkg_dir(name, package_api=(2, 0)), "package.py"
)


class RepoError(spack.error.SpackError):
@@ -1590,7 +1715,10 @@ def __init__(self, name, repo=None):

# We need to compare the base package name
pkg_name = name.rsplit(".", 1)[-1]
similar = difflib.get_close_matches(pkg_name, repo.all_package_names())
try:
similar = difflib.get_close_matches(pkg_name, repo.all_package_names())
except Exception:
similar = []

if 1 <= len(similar) <= 5:
long_msg += "\n\nDid you mean one of the following packages?\n  "

@@ -104,6 +104,7 @@
"additional_external_search_paths": {"type": "array", "items": {"type": "string"}},
"binary_index_ttl": {"type": "integer", "minimum": 0},
"aliases": {"type": "object", "patternProperties": {r"\w[\w-]*": {"type": "string"}}},
"pager": {"type": "array", "items": {"type": "string"}},
},
"deprecatedProperties": [
{

@@ -287,9 +287,33 @@ def specify(spec):
return spack.spec.Spec(spec)


def remove_node(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunction]:
"""Transformation that removes all "node" and "virtual_node" from the input list of facts."""
return list(filter(lambda x: x.args[0] not in ("node", "virtual_node"), facts))
def remove_facts(
*to_be_removed: str,
) -> Callable[[spack.spec.Spec, List[AspFunction]], List[AspFunction]]:
"""Returns a transformation function that removes facts from the input list of facts."""

def _remove(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunction]:
return list(filter(lambda x: x.args[0] not in to_be_removed, facts))

return _remove


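remove_facts generalizes the old remove_node transformation into a factory: call sites name the fact types to drop and get back a reusable filter. A minimal stand-in that models facts as tuples (AspFunction is approximated here for a runnable sketch):

    from typing import Callable, List, Tuple

    Fact = Tuple[str, ...]  # args[0] is the fact name, as in AspFunction.args

    def remove_facts_sketch(*to_be_removed: str) -> Callable[[List[Fact]], List[Fact]]:
        def _remove(facts: List[Fact]) -> List[Fact]:
            return [f for f in facts if f[0] not in to_be_removed]
        return _remove

    drop = remove_facts_sketch("node", "virtual_node")
    facts = [("node", "zlib"), ("version", "zlib", "1.3"), ("virtual_node", "mpi")]
    assert drop(facts) == [("version", "zlib", "1.3")]
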
def remove_build_deps(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunction]:
build_deps = {x.args[2]: x.args[1] for x in facts if x.args[0] == "depends_on"}
result = []
for x in facts:
current_name = x.args[1]
if current_name in build_deps:
x.name = "build_requirement"
result.append(fn.attr("build_requirement", build_deps[current_name], x))
continue

if x.args[0] == "depends_on":
continue

result.append(x)

return result


def all_libcs() -> Set[spack.spec.Spec]:
@@ -1287,12 +1311,8 @@ def on_model(model):
result.raise_if_unsat()

if result.satisfiable and result.unsolved_specs and setup.concretize_everything:
unsolved_str = Result.format_unsolved(result.unsolved_specs)
raise InternalConcretizerError(
"Internal Spack error: the solver completed but produced specs"
" that do not satisfy the request. Please report a bug at "
f"https://github.com/spack/spack/issues\n\t{unsolved_str}"
)
raise OutputDoesNotSatisfyInputError(result.unsolved_specs)

if conc_cache_enabled:
CONC_CACHE.store(problem_repr, result, self.control.statistics, test=setup.tests)
concretization_stats = self.control.statistics
@@ -1735,15 +1755,17 @@ def define_variant(
pkg_fact(fn.variant_condition(name, vid, cond_id))

# record type so we can construct the variant when we read it back in
self.gen.fact(fn.variant_type(vid, variant_def.variant_type.value))
self.gen.fact(fn.variant_type(vid, variant_def.variant_type.string))

if variant_def.sticky:
pkg_fact(fn.variant_sticky(vid))

# define defaults for this variant definition
defaults = variant_def.make_default().value if variant_def.multi else [variant_def.default]
for val in sorted(defaults):
pkg_fact(fn.variant_default_value_from_package_py(vid, val))
if variant_def.multi:
for val in sorted(variant_def.make_default().values):
pkg_fact(fn.variant_default_value_from_package_py(vid, val))
else:
pkg_fact(fn.variant_default_value_from_package_py(vid, variant_def.default))

# define possible values for this variant definition
values = variant_def.values
@@ -1771,7 +1793,9 @@ def define_variant(

# make a spec indicating whether the variant has this conditional value
variant_has_value = spack.spec.Spec()
variant_has_value.variants[name] = vt.VariantBase(name, value.value)
variant_has_value.variants[name] = vt.VariantValue(
vt.VariantType.MULTI, name, (value.value,)
)

if value.when:
# the conditional value is always "possible", but it imposes its when condition as
@@ -1884,7 +1908,7 @@ def condition(

if not context:
context = ConditionContext()
context.transform_imposed = remove_node
context.transform_imposed = remove_facts("node", "virtual_node")

if imposed_spec:
imposed_name = imposed_spec.name or imposed_name
@@ -1984,7 +2008,7 @@ def track_dependencies(input_spec, requirements):
return requirements + [fn.attr("track_dependencies", input_spec.name)]

def dependency_holds(input_spec, requirements):
result = remove_node(input_spec, requirements) + [
result = remove_facts("node", "virtual_node")(input_spec, requirements) + [
fn.attr(
"dependency_holds", pkg.name, input_spec.name, dt.flag_to_string(t)
)
@@ -2174,7 +2198,10 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
pkg_name, ConstraintOrigin.REQUIRE
)
if not virtual:
context.transform_imposed = remove_node
context.transform_required = remove_build_deps
context.transform_imposed = remove_facts(
"node", "virtual_node", "depends_on"
)
# else: for virtuals we want to emit "node" and
# "virtual_node" in imposed specs

@@ -2236,16 +2263,18 @@ def external_packages(self):
if pkg_name not in self.pkgs:
continue

self.gen.h2(f"External package: {pkg_name}")
# Check if the external package is buildable. If it is
# not then "external(<pkg>)" is a fact, unless we can
# reuse an already installed spec.
external_buildable = data.get("buildable", True)
externals = data.get("externals", [])
if not external_buildable or externals:
self.gen.h2(f"External package: {pkg_name}")

if not external_buildable:
self.gen.fact(fn.buildable_false(pkg_name))

# Read a list of all the specs for this package
externals = data.get("externals", [])
candidate_specs = [
spack.spec.parse_with_version_concrete(x["spec"]) for x in externals
]
@@ -2334,6 +2363,8 @@ def preferred_variants(self, pkg_name):
if not preferred_variants:
return

self.gen.h2(f"Package preferences: {pkg_name}")

for variant_name in sorted(preferred_variants):
variant = preferred_variants[variant_name]

@@ -2346,7 +2377,7 @@ def preferred_variants(self, pkg_name):
)
continue

for value in variant.value_as_tuple:
for value in variant.values:
for variant_def in variant_defs:
self.variant_values_from_specs.add((pkg_name, id(variant_def), value))
self.gen.fact(
@@ -2461,10 +2492,10 @@ def _spec_clauses(
# TODO: variant="*" means 'variant is defined to something', which used to
# be meaningless in concretization, as all variants had to be defined. But
# now that variants can be conditional, it should force a variant to exist.
if variant.value == ("*",):
if not variant.values:
continue

for value in variant.value_as_tuple:
for value in variant.values:
# ensure that the value *can* be valid for the spec
if spec.name and not spec.concrete and not spack.repo.PATH.is_virtual(spec.name):
variant_defs = vt.prevalidate_variant_value(
@@ -2574,6 +2605,16 @@ def _spec_clauses(
# already-installed concrete specs.
if concrete_build_deps or dspec.depflag != dt.BUILD:
clauses.append(fn.attr("hash", dep.name, dep.dag_hash()))
elif not concrete_build_deps and dspec.depflag:
clauses.append(
fn.attr(
"concrete_build_dependency", spec.name, dep.name, dep.dag_hash()
)
)
for virtual_name in dspec.virtuals:
clauses.append(
fn.attr("virtual_on_build_edge", spec.name, dep.name, virtual_name)
)

# if the spec is abstract, descend into dependencies.
# if it's concrete, then the hashes above take care of dependency
@@ -3128,7 +3169,6 @@ def setup(
for pkg in sorted(self.pkgs):
self.gen.h2("Package rules: %s" % pkg)
self.pkg_rules(pkg, tests=self.tests)
self.gen.h2("Package preferences: %s" % pkg)
self.preferred_variants(pkg)

self.gen.h1("Special variants")
@@ -3200,12 +3240,13 @@ def define_runtime_constraints(self) -> List[spack.spec.Spec]:

# FIXME (compiler as nodes): think of using isinstance(compiler_cls, WrappedCompiler)
# Add a dependency on the compiler wrapper
recorder("*").depends_on(
"compiler-wrapper",
when=f"%{compiler.name}@{compiler.versions}",
type="build",
description=f"Add the compiler wrapper when using {compiler}",
)
for language in ("c", "cxx", "fortran"):
recorder("*").depends_on(
"compiler-wrapper",
when=f"%[virtuals={language}] {compiler.name}@{compiler.versions}",
type="build",
description=f"Add the compiler wrapper when using {compiler} for {language}",
)

if not using_libc_compatibility():
continue
@@ -3267,15 +3308,13 @@ def literal_specs(self, specs):
# These facts are needed to compute the "condition_set" of the root
pkg_name = clause.args[1]
self.gen.fact(fn.mentioned_in_literal(trigger_id, root_name, pkg_name))
elif clause_name == "depends_on":
pkg_name = clause.args[2]
self.gen.fact(fn.mentioned_in_literal(trigger_id, root_name, pkg_name))

requirements.append(
fn.attr(
"virtual_root" if spack.repo.PATH.is_virtual(spec.name) else "root", spec.name
)
)
requirements = [x for x in requirements if x.args[0] != "depends_on"]
cache[imposed_spec_key] = (effect_id, requirements)
self.gen.fact(fn.pkg_fact(spec.name, fn.condition_effect(condition_id, effect_id)))

@@ -3600,11 +3639,9 @@ def rule_body_from(self, when_spec: "spack.spec.Spec") -> Tuple[str, str]:
# (avoid adding virtuals everywhere, if a single edge needs it)
_, provider, virtual = clause.args
clause.args = "virtual_on_edge", node_placeholder, provider, virtual
body_str = (
f"  {f',{os.linesep}  '.join(str(x) for x in body_clauses)},\n"
f"  not external({node_variable}),\n"
f"  not runtime(Package)"
).replace(f'"{node_placeholder}"', f"{node_variable}")
body_str = ",\n".join(f"  {x}" for x in body_clauses)
body_str += f",\n  not external({node_variable})"
body_str = body_str.replace(f'"{node_placeholder}"', f"{node_variable}")
for old, replacement in when_substitutions.items():
body_str = body_str.replace(old, replacement)
return body_str, node_variable
@@ -3795,13 +3832,13 @@ def node_os(self, node, os):
def node_target(self, node, target):
self._arch(node).target = target

def variant_selected(self, node, name, value, variant_type, variant_id):
def variant_selected(self, node, name: str, value: str, variant_type: str, variant_id):
spec = self._specs[node]
variant = spec.variants.get(name)
if not variant:
spec.variants[name] = vt.VariantType(variant_type).variant_class(name, value)
spec.variants[name] = vt.VariantValue.from_concretizer(name, value, variant_type)
else:
assert variant_type == vt.VariantType.MULTI.value, (
assert variant_type == "multi", (
f"Can't have multiple values for single-valued variant: "
f"{node}, {name}, {value}, {variant_type}, {variant_id}"
)
@@ -3825,6 +3862,17 @@ def external_spec_selected(self, node, idx):
)
self._specs[node].extra_attributes = spec_info.get("extra_attributes", {})

# Annotate compiler specs from externals
external_spec = spack.spec.Spec(spec_info["spec"])
external_spec_deps = external_spec.dependencies()
if len(external_spec_deps) > 1:
raise InvalidExternalError(
f"external spec {spec_info['spec']} cannot have more than one dependency"
)
elif len(external_spec_deps) == 1:
compiler_str = external_spec_deps[0]
self._specs[node].annotations.with_compiler(spack.spec.Spec(compiler_str))

# If this is an extension, update the dependencies to include the extendee
package = spack.repo.PATH.get_pkg_class(self._specs[node].fullname)(self._specs[node])
extendee_spec = package.extendee_spec
@@ -4180,10 +4228,10 @@ def _inject_patches_variant(root: spack.spec.Spec) -> None:
continue

patches = list(spec_to_patches[id(spec)])
variant: vt.MultiValuedVariant = spec.variants.setdefault(
variant: vt.VariantValue = spec.variants.setdefault(
"patches", vt.MultiValuedVariant("patches", ())
)
variant.value = tuple(p.sha256 for p in patches)
variant.set(*(p.sha256 for p in patches))
# FIXME: Monkey patches variant to store patches order
ordered_hashes = [(*p.ordering_key, p.sha256) for p in patches if p.ordering_key]
ordered_hashes.sort()
@@ -4651,13 +4699,9 @@ def solve_in_rounds(
break

if not result.specs:
# This is also a problem: no specs were solved for, which
# means we would be in a loop if we tried again
unsolved_str = Result.format_unsolved(result.unsolved_specs)
raise InternalConcretizerError(
"Internal Spack error: a subset of input specs could not"
f" be solved for.\n\t{unsolved_str}"
)
# This is also a problem: no specs were solved for, which means we would be in a
# loop if we tried again
raise OutputDoesNotSatisfyInputError(result.unsolved_specs)

input_specs = list(x for (x, y) in result.unsolved_specs)
for spec in result.specs:
@@ -4687,6 +4731,19 @@ def __init__(self, msg):
self.constraint_type = None


class OutputDoesNotSatisfyInputError(InternalConcretizerError):

def __init__(
self, input_to_output: List[Tuple[spack.spec.Spec, Optional[spack.spec.Spec]]]
) -> None:
self.input_to_output = input_to_output
super().__init__(
"internal solver error: the solver completed but produced specs"
" that do not satisfy the request. Please report a bug at "
f"https://github.com/spack/spack/issues\n\t{Result.format_unsolved(input_to_output)}"
)


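The new exception keeps the unsolved (input, output) pairs as data on the instance rather than only in the rendered message, so callers can introspect what failed. The shape, reduced to a stdlib sketch (names and the message format are illustrative):

    from typing import List, Optional, Tuple

    class OutputMismatchSketch(Exception):
        def __init__(self, input_to_output: List[Tuple[str, Optional[str]]]) -> None:
            self.input_to_output = input_to_output  # preserved for callers
            unsolved = ", ".join(i for i, o in input_to_output if o is None)
            super().__init__(f"solver output does not satisfy input: {unsolved}")

    try:
        raise OutputMismatchSketch([("zlib@2:", None)])
    except OutputMismatchSketch as e:
        assert e.input_to_output[0][0] == "zlib@2:"
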
class SolverError(InternalConcretizerError):
"""For cases where the solver is unable to produce a solution.

@@ -4719,3 +4776,7 @@ class InvalidSpliceError(spack.error.SpackError):

class NoCompilerFoundError(spack.error.SpackError):
"""Raised when there is no possible compiler"""


class InvalidExternalError(spack.error.SpackError):
"""Raised when there is no possible compiler"""

@@ -175,12 +175,24 @@ trigger_node(TriggerID, Node, Node) :-

% Since we trigger the existence of literal nodes from a condition, we need to construct the condition_set/2
mentioned_in_literal(Root, Mentioned) :- mentioned_in_literal(TriggerID, Root, Mentioned), solve_literal(TriggerID).
condition_set(node(min_dupe_id, Root), node(min_dupe_id, Root)) :- mentioned_in_literal(Root, Root).
literal_node(Root, node(min_dupe_id, Root)) :- mentioned_in_literal(Root, Root).

1 { condition_set(node(min_dupe_id, Root), node(0..Y-1, Mentioned)) : max_dupes(Mentioned, Y) } 1 :-
1 { literal_node(Root, node(0..Y-1, Mentioned)) : max_dupes(Mentioned, Y) } 1 :-
mentioned_in_literal(Root, Mentioned), Mentioned != Root,
internal_error("must have exactly one condition_set for literals").

1 { build_dependency_of_literal_node(LiteralNode, node(0..Y-1, BuildDependency)) : max_dupes(BuildDependency, Y) } 1 :-
literal_node(Root, LiteralNode),
build(LiteralNode),
not external(LiteralNode),
attr("build_requirement", LiteralNode, build_requirement("node", BuildDependency)).

condition_set(node(min_dupe_id, Root), LiteralNode) :- literal_node(Root, LiteralNode).
condition_set(LiteralNode, BuildNode) :- build_dependency_of_literal_node(LiteralNode, BuildNode).

:- build_dependency_of_literal_node(LiteralNode, BuildNode),
not attr("depends_on", LiteralNode, BuildNode, "build").

% Discriminate between "roots" that have been explicitly requested, and roots that are deduced from "virtual roots"
explicitly_requested_root(node(min_dupe_id, Package)) :-
solve_literal(TriggerID),
@@ -472,10 +484,53 @@ provider(ProviderNode, VirtualNode) :- attr("provider_set", ProviderNode, Virtua
imposed_constraint(ID, "depends_on", A1, A2, A3),
internal_error("Build deps must land in exactly one duplicate").

1 { build_requirement(node(X, Parent), node(0..Y-1, BuildDependency)) : max_dupes(BuildDependency, Y) } 1
% If the parent is built, then we have a build_requirement on another node. For concrete nodes,
% or external nodes, we don't since we are trimming their build dependencies.
1 { attr("depends_on", node(X, Parent), node(0..Y-1, BuildDependency), "build") : max_dupes(BuildDependency, Y) } 1
:- attr("build_requirement", node(X, Parent), build_requirement("node", BuildDependency)),
impose(ID, node(X, Parent)),
imposed_constraint(ID,"build_requirement",Parent,_).
build(node(X, Parent)),
not external(node(X, Parent)).

% Concrete nodes
:- attr("build_requirement", ParentNode, build_requirement("node", BuildDependency)),
concrete(ParentNode),
not attr("concrete_build_dependency", ParentNode, BuildDependency, _).

:- attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, Constraint)),
attr("concrete_build_dependency", ParentNode, BuildDependency, BuildDependencyHash),
not 1 { pkg_fact(BuildDependency, version_satisfies(Constraint, Version)) : hash_attr(BuildDependencyHash, "version", BuildDependency, Version) } 1.

:- attr("build_requirement", ParentNode, build_requirement("provider_set", BuildDependency, Virtual)),
attr("concrete_build_dependency", ParentNode, BuildDependency, BuildDependencyHash),
attr("virtual_on_build_edge", ParentNode, BuildDependency, Virtual),
not 1 { pkg_fact(BuildDependency, version_satisfies(Constraint, Version)) : hash_attr(BuildDependencyHash, "version", BuildDependency, Version) } 1.

% External nodes
:- attr("build_requirement", ParentNode, build_requirement("node", BuildDependency)),
external(ParentNode),
not attr("external_build_requirement", ParentNode, build_requirement("node", BuildDependency)).

candidate_external_version(Constraint, BuildDependency, Version)
:- attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, Constraint)),
external(ParentNode),
pkg_fact(BuildDependency, version_satisfies(Constraint, Version)).

error(100, "External {0} cannot satisfy both {1} and {2}", BuildDependency, LiteralConstraint, ExternalConstraint)
:- attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, LiteralConstraint)),
external(ParentNode),
attr("external_build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, ExternalConstraint)),
not 1 { pkg_fact(BuildDependency, version_satisfies(ExternalConstraint, Version)) : candidate_external_version(LiteralConstraint, BuildDependency, Version) }.


% Asking for gcc@10 %gcc@9 shouldn't give us back an external gcc@10, just because of the hack
% we have on externals
:- attr("build_requirement", node(X, Parent), build_requirement("node", BuildDependency)),
Parent == BuildDependency,
external(node(X, Parent)).

build_requirement(node(X, Parent), node(Y, BuildDependency)) :-
attr("depends_on", node(X, Parent), node(Y, BuildDependency), "build"),
attr("build_requirement", node(X, Parent), build_requirement("node", BuildDependency)).

1 { virtual_build_requirement(ParentNode, node(0..Y-1, Virtual)) : max_dupes(Virtual, Y) } 1
:- attr("dependency_holds", ParentNode, Virtual, "build"),
@@ -496,7 +551,6 @@ attr("node_version_satisfies", node(X, BuildDependency), Constraint) :-
attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, Constraint)),
build_requirement(ParentNode, node(X, BuildDependency)).

attr("depends_on", node(X, Parent), node(Y, BuildDependency), "build") :- build_requirement(node(X, Parent), node(Y, BuildDependency)).

1 { attr("provider_set", node(X, BuildDependency), node(0..Y-1, Virtual)) : max_dupes(Virtual, Y) } 1 :-
attr("build_requirement", ParentNode, build_requirement("provider_set", BuildDependency, Virtual)),
@@ -882,6 +936,12 @@ requirement_weight(node(ID, Package), Group, W) :-
requirement_policy(Package, Group, "one_of"),
requirement_group_satisfied(node(ID, Package), Group).

{ attr("build_requirement", node(ID, Package), BuildRequirement) : condition_requirement(TriggerID, "build_requirement", Package, BuildRequirement) } :-
pkg_fact(Package, condition_trigger(ConditionID, TriggerID)),
requirement_group_member(ConditionID, Package, Group),
activate_requirement(node(ID, Package), Group),
requirement_group(Package, Group).

requirement_group_satisfied(node(ID, Package), X) :-
1 { condition_holds(Y, node(ID, Package)) : requirement_group_member(Y, Package, X) } ,
requirement_policy(Package, X, "any_of"),

@@ -85,8 +85,10 @@ def is_virtual(self, name: str) -> bool:
def is_allowed_on_this_platform(self, *, pkg_name: str) -> bool:
"""Returns true if a package is allowed on the current host"""
pkg_cls = self.repo.get_pkg_class(pkg_name)
no_condition = spack.spec.Spec()
for when_spec, conditions in pkg_cls.requirements.items():
if not when_spec.intersects(self._platform_condition):
# Restrict analysis to unconditional requirements
if when_spec != no_condition:
continue
for requirements, _, _ in conditions:
if not any(x.intersects(self._platform_condition) for x in requirements):

@@ -111,22 +111,14 @@
__all__ = [
"CompilerSpec",
"Spec",
"SpecParseError",
"UnsupportedPropagationError",
"DuplicateDependencyError",
"DuplicateCompilerSpecError",
"UnsupportedCompilerError",
"DuplicateArchitectureError",
"InconsistentSpecError",
"InvalidDependencyError",
"NoProviderError",
"MultipleProviderError",
"UnsatisfiableSpecNameError",
"UnsatisfiableVersionSpecError",
"UnsatisfiableCompilerSpecError",
"UnsatisfiableCompilerFlagSpecError",
"UnsatisfiableArchitectureSpecError",
"UnsatisfiableProviderSpecError",
"UnsatisfiableDependencySpecError",
"AmbiguousHashError",
"InvalidHashError",
@@ -845,7 +837,7 @@ def _shared_subset_pair_iterate(container1, container2):
b_idx += 1


class FlagMap(lang.HashableMap):
class FlagMap(lang.HashableMap[str, List[CompilerFlag]]):
__slots__ = ("spec",)

def __init__(self, spec):
@@ -1437,7 +1429,7 @@ def with_compiler(self, compiler: "Spec") -> "SpecAnnotations":
def __repr__(self) -> str:
result = f"SpecAnnotations().with_spec_format({self.original_spec_format})"
if self.compiler_node_attribute:
result += f"with_compiler({str(self.compiler_node_attribute)})"
result += f".with_compiler({str(self.compiler_node_attribute)})"
return result


@@ -1706,7 +1698,9 @@ def _dependencies_dict(self, depflag: dt.DepFlag = dt.ALL):
result[key] = list(group)
return result

def _add_flag(self, name: str, value: str, propagate: bool, concrete: bool) -> None:
def _add_flag(
self, name: str, value: Union[str, bool], propagate: bool, concrete: bool
) -> None:
"""Called by the parser to add a known flag"""

if propagate and name in vt.RESERVED_NAMES:
@@ -1716,6 +1710,7 @@ def _add_flag(self, name: str, value: str, propagate: bool, concrete: bool) -> N

valid_flags = FlagMap.valid_compiler_flags()
if name == "arch" or name == "architecture":
assert type(value) is str, "architecture must have a string value"
parts = tuple(value.split("-"))
plat, os, tgt = parts if len(parts) == 3 else (None, None, value)
self._set_architecture(platform=plat, os=os, target=tgt)
@@ -1729,17 +1724,15 @@ def _add_flag(self, name: str, value: str, propagate: bool, concrete: bool) -> N
self.namespace = value
elif name in valid_flags:
assert self.compiler_flags is not None
assert type(value) is str, f"{name} must have a string value"
flags_and_propagation = spack.compilers.flags.tokenize_flags(value, propagate)
flag_group = " ".join(x for (x, y) in flags_and_propagation)
for flag, propagation in flags_and_propagation:
self.compiler_flags.add_flag(name, flag, propagation, flag_group)
else:
if str(value).upper() == "TRUE" or str(value).upper() == "FALSE":
self.variants[name] = vt.BoolValuedVariant(name, value, propagate)
elif concrete:
self.variants[name] = vt.MultiValuedVariant(name, value, propagate)
else:
self.variants[name] = vt.VariantBase(name, value, propagate)
self.variants[name] = vt.VariantValue.from_string_or_bool(
name, value, propagate=propagate, concrete=concrete
)

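The three variant constructors collapse into one entry point that decides bool-ness from the value itself. A sketch of the coercion the string/bool union type implies (the real logic lives in vt.VariantValue.from_string_or_bool and may differ in detail):

    from typing import Union

    def coerce_variant_value(value: Union[str, bool]) -> Union[str, bool]:
        if isinstance(value, bool):
            return value
        if value.upper() in ("TRUE", "FALSE"):  # the old BoolValuedVariant case
            return value.upper() == "TRUE"
        return value  # stays a string (multi- or single-valued variant)

    assert coerce_variant_value("True") is True
    assert coerce_variant_value("shared") == "shared"
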
def _set_architecture(self, **kwargs):
"""Called by the parser to set the architecture."""
@@ -1868,9 +1861,7 @@ def add_dependency_edge(
@property
def fullname(self):
return (
("%s.%s" % (self.namespace, self.name))
if self.namespace
else (self.name if self.name else "")
f"{self.namespace}.{self.name}" if self.namespace else (self.name if self.name else "")
)

@property
@@ -3401,7 +3392,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
return True

# If we have no dependencies, we can't satisfy any constraints.
if not self._dependencies:
if not self._dependencies and self.original_spec_format() >= 5 and not self.external:
return False

# If we arrived here, the lhs root node satisfies the rhs root node. Now we need to check
@@ -3412,6 +3403,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
# verify the edge properties, cause everything is encoded in the hash of the nodes that
# will be verified later.
lhs_edges: Dict[str, Set[DependencySpec]] = collections.defaultdict(set)
mock_nodes_from_old_specfiles = set()
for rhs_edge in other.traverse_edges(root=False, cover="edges"):
# If we are checking for ^mpi we need to verify if there is any edge
if spack.repo.PATH.is_virtual(rhs_edge.spec.name):
@@ -3433,13 +3425,27 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
except KeyError:
return False

candidates = current_node.dependencies(
name=rhs_edge.spec.name,
deptype=rhs_edge.depflag,
virtuals=rhs_edge.virtuals or None,
)
if not candidates or not any(x.satisfies(rhs_edge.spec) for x in candidates):
return False
if current_node.original_spec_format() < 5 or (
current_node.original_spec_format() >= 5 and current_node.external
):
compiler_spec = current_node.annotations.compiler_node_attribute
if compiler_spec is None:
return False

mock_nodes_from_old_specfiles.add(compiler_spec)
# This checks that the single node compiler spec satisfies the request
# of a direct dependency. The check is not perfect, but is based on a heuristic.
|
||||
if not compiler_spec.satisfies(rhs_edge.spec):
|
||||
return False
|
||||
|
||||
else:
|
||||
candidates = current_node.dependencies(
|
||||
name=rhs_edge.spec.name,
|
||||
deptype=rhs_edge.depflag,
|
||||
virtuals=rhs_edge.virtuals or None,
|
||||
)
|
||||
if not candidates or not any(x.satisfies(rhs_edge.spec) for x in candidates):
|
||||
return False
|
||||
|
||||
continue
|
||||
|
||||
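
Nodes deserialized from pre-v5 specfiles (and externals) carry no real compiler dependency edge, so the new branch answers a direct `%compiler` request from the node's compiler annotation and stashes that spec for the later node-by-node pass. A rough, runnable sketch of the control flow with stand-in types (not Spack's classes; real satisfaction is far richer than the prefix match used here):

    from dataclasses import dataclass
    from typing import Optional, Set, Tuple

    @dataclass(frozen=True)
    class Node:
        name: str
        spec_format: int = 5
        external: bool = False
        compiler_annotation: Optional[str] = None
        dep_compilers: Tuple[str, ...] = ()

    def toy_satisfies(lhs: str, rhs: str) -> bool:
        return lhs.startswith(rhs)  # stand-in for Spec.satisfies

    def compiler_request_ok(node: Node, request: str, mock_nodes: Set[str]) -> bool:
        if node.spec_format < 5 or node.external:
            # Old data: only the annotation exists; heuristic single-node check.
            if node.compiler_annotation is None:
                return False
            mock_nodes.add(node.compiler_annotation)  # re-checked node-by-node later
            return toy_satisfies(node.compiler_annotation, request)
        # New-format nodes: search the real dependency edges instead.
        return any(toy_satisfies(d, request) for d in node.dep_compilers)

    seen: Set[str] = set()
    old = Node("zlib", spec_format=4, compiler_annotation="gcc@10.2.1")
    assert compiler_request_ok(old, "gcc", seen) and "gcc@10.2.1" in seen
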
@@ -3479,8 +3485,9 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
                 return False

         # Edges have been checked above already, hence deps=False
+        lhs_nodes = [x for x in self.traverse(root=False)] + sorted(mock_nodes_from_old_specfiles)
         return all(
-            any(lhs.satisfies(rhs, deps=False) for lhs in self.traverse(root=False))
+            any(lhs.satisfies(rhs, deps=False) for lhs in lhs_nodes)
             for rhs in other.traverse(root=False)
         )

@@ -3954,6 +3961,8 @@ def format_attribute(match_object: Match) -> str:
             except AttributeError:
                 if part == "compiler":
                     return "none"
+                elif part == "specfile_version":
+                    return f"v{current.original_spec_format()}"

                 raise SpecFormatStringError(
                     f"Attempted to format attribute {attribute}. "
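
Like the existing `{compiler}` fallback, the new `{specfile_version}` token is synthesized rather than read off the node. A minimal sketch of that fallback chain (a plain dict stands in for the spec object; field names are illustrative):

    def format_attribute(spec_fields: dict, part: str) -> str:
        try:
            return str(spec_fields[part])
        except KeyError:  # plays the role of AttributeError in the real code
            if part == "compiler":
                return "none"
            elif part == "specfile_version":
                return f"v{spec_fields.get('_format', 5)}"
            raise ValueError(f"Attempted to format attribute {part}.")

    assert format_attribute({"name": "zlib"}, "specfile_version") == "v5"
    assert format_attribute({}, "compiler") == "none"
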
@@ -4479,7 +4488,7 @@ def has_virtual_dependency(self, virtual: str) -> bool:
         return bool(self.dependencies(virtuals=(virtual,)))


-class VariantMap(lang.HashableMap):
+class VariantMap(lang.HashableMap[str, vt.VariantValue]):
     """Map containing variant instances. New values can be added only
     if the key is not already present."""

@@ -4489,7 +4498,7 @@ def __init__(self, spec: Spec):

     def __setitem__(self, name, vspec):
         # Raise a TypeError if vspec is not of the right type
-        if not isinstance(vspec, vt.VariantBase):
+        if not isinstance(vspec, vt.VariantValue):
             raise TypeError(
                 "VariantMap accepts only values of variant types "
                 f"[got {type(vspec).__name__} instead]"
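
Parametrizing `lang.HashableMap` lets type checkers see that `VariantMap` maps names to `vt.VariantValue`, which is what the tightened `isinstance` guard enforces at runtime. A self-contained toy of the pattern (this is not Spack's implementation of `HashableMap`):

    from collections.abc import Iterator, MutableMapping
    from typing import Dict, TypeVar

    K = TypeVar("K")
    V = TypeVar("V")

    class ToyHashableMap(MutableMapping[K, V]):
        """Dict-like map that is hashable by its (sorted) contents."""

        def __init__(self) -> None:
            self._data: Dict[K, V] = {}

        def __getitem__(self, key: K) -> V:
            return self._data[key]

        def __setitem__(self, key: K, value: V) -> None:
            self._data[key] = value

        def __delitem__(self, key: K) -> None:
            del self._data[key]

        def __iter__(self) -> Iterator[K]:
            return iter(self._data)

        def __len__(self) -> int:
            return len(self._data)

        def __hash__(self) -> int:
            return hash(tuple(sorted(self._data.items())))

    class ToyVariantMap(ToyHashableMap[str, str]):  # real code: HashableMap[str, vt.VariantValue]
        pass

    vm = ToyVariantMap()
    vm["shared"] = "true"
    assert isinstance(hash(vm), int)
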
@@ -4629,7 +4638,7 @@ def __str__(self):
         bool_keys = []
         kv_keys = []
         for key in sorted_keys:
-            if isinstance(self[key].value, bool):
+            if self[key].type == vt.VariantType.BOOL:
                 bool_keys.append(key)
             else:
                 kv_keys.append(key)
@@ -4662,7 +4671,8 @@ def substitute_abstract_variants(spec: Spec):
     unknown = []
     for name, v in spec.variants.items():
         if name == "dev_path":
-            spec.variants.substitute(vt.SingleValuedVariant(name, v._original_value))
+            v.type = vt.VariantType.SINGLE
+            v.concrete = True
             continue
         elif name in vt.RESERVED_NAMES:
             continue
@@ -4685,7 +4695,7 @@ def substitute_abstract_variants(spec: Spec):
         if rest:
             continue

-        new_variant = pkg_variant.make_variant(v._original_value)
+        new_variant = pkg_variant.make_variant(*v.values)
         pkg_variant.validate_or_raise(new_variant, spec.name)
         spec.variants.substitute(new_variant)

@@ -4811,7 +4821,7 @@ def from_node_dict(cls, node):
                 for val in values:
                     spec.compiler_flags.add_flag(name, val, propagate)
             else:
-                spec.variants[name] = vt.MultiValuedVariant.from_node_dict(
+                spec.variants[name] = vt.VariantValue.from_node_dict(
                     name, values, propagate=propagate, abstract=name in abstract_variants
                 )

@@ -4837,7 +4847,7 @@ def from_node_dict(cls, node):
             patches = node["patches"]
             if len(patches) > 0:
                 mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
-                mvar.value = patches
+                mvar.set(*patches)
                 # FIXME: Monkey patches mvar to store patches order
                 mvar._patches_in_order_of_appearance = patches

@@ -5162,25 +5172,6 @@ def eval_conditional(string):
     return eval(string, valid_variables)


-class SpecParseError(spack.error.SpecError):
-    """Wrapper for ParseError for when we're parsing specs."""
-
-    def __init__(self, parse_error):
-        super().__init__(parse_error.message)
-        self.string = parse_error.string
-        self.pos = parse_error.pos
-
-    @property
-    def long_message(self):
-        return "\n".join(
-            [
-                " Encountered when parsing spec:",
-                " %s" % self.string,
-                " %s^" % (" " * self.pos),
-            ]
-        )
-
-
 class InvalidVariantForSpecError(spack.error.SpecError):
     """Raised when an invalid conditional variant is specified."""

@@ -5198,14 +5189,6 @@ class DuplicateDependencyError(spack.error.SpecError):
     """Raised when the same dependency occurs in a spec twice."""


-class MultipleVersionError(spack.error.SpecError):
-    """Raised when version constraints occur in a spec twice."""
-
-
-class DuplicateCompilerSpecError(spack.error.SpecError):
-    """Raised when the same compiler occurs in a spec twice."""
-
-
 class UnsupportedCompilerError(spack.error.SpecError):
     """Raised when the user asks for a compiler spack doesn't know about."""

@@ -5214,11 +5197,6 @@ class DuplicateArchitectureError(spack.error.SpecError):
     """Raised when the same architecture occurs in a spec twice."""


-class InconsistentSpecError(spack.error.SpecError):
-    """Raised when two nodes in the same spec DAG have inconsistent
-    constraints."""
-
-
 class InvalidDependencyError(spack.error.SpecError):
     """Raised when a dependency in a spec is not actually a dependency
     of the package."""

@@ -5230,30 +5208,6 @@ def __init__(self, pkg, deps):
         )


-class NoProviderError(spack.error.SpecError):
-    """Raised when there is no package that provides a particular
-    virtual dependency.
-    """
-
-    def __init__(self, vpkg):
-        super().__init__("No providers found for virtual package: '%s'" % vpkg)
-        self.vpkg = vpkg
-
-
-class MultipleProviderError(spack.error.SpecError):
-    """Raised when there is no package that provides a particular
-    virtual dependency.
-    """
-
-    def __init__(self, vpkg, providers):
-        """Takes the name of the vpkg"""
-        super().__init__(
-            "Multiple providers found for '%s': %s" % (vpkg, [str(s) for s in providers])
-        )
-        self.vpkg = vpkg
-        self.providers = providers
-
-
 class UnsatisfiableSpecNameError(spack.error.UnsatisfiableSpecError):
     """Raised when two specs aren't even for the same package."""

@@ -5268,20 +5222,6 @@ def __init__(self, provided, required):
         super().__init__(provided, required, "version")


-class UnsatisfiableCompilerSpecError(spack.error.UnsatisfiableSpecError):
-    """Raised when a spec compiler conflicts with package constraints."""
-
-    def __init__(self, provided, required):
-        super().__init__(provided, required, "compiler")
-
-
-class UnsatisfiableCompilerFlagSpecError(spack.error.UnsatisfiableSpecError):
-    """Raised when a spec variant conflicts with package constraints."""
-
-    def __init__(self, provided, required):
-        super().__init__(provided, required, "compiler_flags")
-
-
 class UnsatisfiableArchitectureSpecError(spack.error.UnsatisfiableSpecError):
     """Raised when a spec architecture conflicts with package constraints."""

@@ -5289,14 +5229,6 @@ def __init__(self, provided, required):
         super().__init__(provided, required, "architecture")


-class UnsatisfiableProviderSpecError(spack.error.UnsatisfiableSpecError):
-    """Raised when a provider is supplied but constraints don't match
-    a vpkg requirement"""
-
-    def __init__(self, provided, required):
-        super().__init__(provided, required, "provider")
-
-
 # TODO: get rid of this and be more specific about particular incompatible
 # dep constraints
 class UnsatisfiableDependencySpecError(spack.error.UnsatisfiableSpecError):
@@ -1,263 +0,0 @@
-# Copyright Spack Project Developers. See COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import itertools
-from typing import List
-
-import spack.spec
-import spack.variant
-from spack.error import SpackError
-from spack.spec import Spec
-
-
-class SpecList:
-    def __init__(self, name="specs", yaml_list=None, reference=None):
-        # Normalize input arguments
-        yaml_list = yaml_list or []
-        reference = reference or {}
-
-        self.name = name
-        self._reference = reference  # TODO: Do we need defensive copy here?
-
-        # Validate yaml_list before assigning
-        if not all(isinstance(s, str) or isinstance(s, (list, dict)) for s in yaml_list):
-            raise ValueError(
-                "yaml_list can contain only valid YAML types! Found:\n %s"
-                % [type(s) for s in yaml_list]
-            )
-        self.yaml_list = yaml_list[:]
-
-        # Expansions can be expensive to compute and difficult to keep updated
-        # We cache results and invalidate when self.yaml_list changes
-        self._expanded_list = None
-        self._constraints = None
-        self._specs = None
-
-    @property
-    def is_matrix(self):
-        for item in self.specs_as_yaml_list:
-            if isinstance(item, dict):
-                return True
-        return False
-
-    @property
-    def specs_as_yaml_list(self):
-        if self._expanded_list is None:
-            self._expanded_list = self._expand_references(self.yaml_list)
-        return self._expanded_list
-
-    @property
-    def specs_as_constraints(self):
-        if self._constraints is None:
-            constraints = []
-            for item in self.specs_as_yaml_list:
-                if isinstance(item, dict):  # matrix of specs
-                    constraints.extend(_expand_matrix_constraints(item))
-                else:  # individual spec
-                    constraints.append([Spec(item)])
-            self._constraints = constraints
-
-        return self._constraints
-
-    @property
-    def specs(self) -> List[Spec]:
-        if self._specs is None:
-            specs = []
-            # This could be slightly faster done directly from yaml_list,
-            # but this way is easier to maintain.
-            for constraint_list in self.specs_as_constraints:
-                spec = constraint_list[0].copy()
-                for const in constraint_list[1:]:
-                    spec.constrain(const)
-                specs.append(spec)
-            self._specs = specs
-
-        return self._specs
-
-    def add(self, spec):
-        self.yaml_list.append(str(spec))
-
-        # expanded list can be updated without invalidation
-        if self._expanded_list is not None:
-            self._expanded_list.append(str(spec))
-
-        # Invalidate cache variables when we change the list
-        self._constraints = None
-        self._specs = None
-
-    def remove(self, spec):
-        # Get spec to remove from list
-        remove = [
-            s
-            for s in self.yaml_list
-            if (isinstance(s, str) and not s.startswith("$")) and Spec(s) == Spec(spec)
-        ]
-        if not remove:
-            msg = f"Cannot remove {spec} from SpecList {self.name}.\n"
-            msg += f"Either {spec} is not in {self.name} or {spec} is "
-            msg += "expanded from a matrix and cannot be removed directly."
-            raise SpecListError(msg)
-
-        # Remove may contain more than one string representation of the same spec
-        for item in remove:
-            self.yaml_list.remove(item)
-
-        # invalidate cache variables when we change the list
-        self._expanded_list = None
-        self._constraints = None
-        self._specs = None
-
-    def replace(self, idx: int, spec: str):
-        """Replace the existing spec at the index with the new one.
-
-        Args:
-            idx: index of the spec to replace in the speclist
-            spec: new spec
-        """
-        self.yaml_list[idx] = spec
-
-        # invalidate cache variables when we change the list
-        self._expanded_list = None
-        self._constraints = None
-        self._specs = None
-
-    def extend(self, other, copy_reference=True):
-        self.yaml_list.extend(other.yaml_list)
-        self._expanded_list = None
-        self._constraints = None
-        self._specs = None
-
-        if copy_reference:
-            self._reference = other._reference
-
-    def update_reference(self, reference):
-        self._reference = reference
-        self._expanded_list = None
-        self._constraints = None
-        self._specs = None
-
-    def _parse_reference(self, name):
-        sigil = ""
-        name = name[1:]
-
-        # Parse specs as constraints
-        if name.startswith("^") or name.startswith("%"):
-            sigil = name[0]
-            name = name[1:]
-
-        # Make sure the reference is valid
-        if name not in self._reference:
-            msg = f"SpecList '{self.name}' refers to named list '{name}'"
-            msg += " which does not appear in its reference dict."
-            raise UndefinedReferenceError(msg)
-
-        return (name, sigil)
-
-    def _expand_references(self, yaml):
-        if isinstance(yaml, list):
-            ret = []
-
-            for item in yaml:
-                # if it's a reference, expand it
-                if isinstance(item, str) and item.startswith("$"):
-                    # replace the reference and apply the sigil if needed
-                    name, sigil = self._parse_reference(item)
-
-                    referent = [
-                        _sigilify(item, sigil) for item in self._reference[name].specs_as_yaml_list
-                    ]
-                    ret.extend(referent)
-                else:
-                    # else just recurse
-                    ret.append(self._expand_references(item))
-            return ret
-        elif isinstance(yaml, dict):
-            # There can't be expansions in dicts
-            return dict((name, self._expand_references(val)) for (name, val) in yaml.items())
-        else:
-            # Strings are just returned
-            return yaml
-
-    def __len__(self):
-        return len(self.specs)
-
-    def __getitem__(self, key):
-        return self.specs[key]
-
-    def __iter__(self):
-        return iter(self.specs)
-
-
-def _expand_matrix_constraints(matrix_config):
-    # recurse so we can handle nested matrices
-    expanded_rows = []
-    for row in matrix_config["matrix"]:
-        new_row = []
-        for r in row:
-            if isinstance(r, dict):
-                # Flatten the nested matrix into a single row of constraints
-                new_row.extend(
-                    [
-                        [" ".join([str(c) for c in expanded_constraint_list])]
-                        for expanded_constraint_list in _expand_matrix_constraints(r)
-                    ]
-                )
-            else:
-                new_row.append([r])
-        expanded_rows.append(new_row)
-
-    excludes = matrix_config.get("exclude", [])  # only compute once
-    sigil = matrix_config.get("sigil", "")
-
-    results = []
-    for combo in itertools.product(*expanded_rows):
-        # Construct a combined spec to test against excludes
-        flat_combo = [Spec(constraint) for constraints in combo for constraint in constraints]
-
-        test_spec = flat_combo[0].copy()
-        for constraint in flat_combo[1:]:
-            test_spec.constrain(constraint)
-
-        # Abstract variants don't have normal satisfaction semantics
-        # Convert all variants to concrete types.
-        # This method is best effort, so all existing variants will be
-        # converted before any error is raised.
-        # Catch exceptions because we want to be able to operate on
-        # abstract specs without needing package information
-        try:
-            spack.spec.substitute_abstract_variants(test_spec)
-        except spack.variant.UnknownVariantError:
-            pass
-
-        # Resolve abstract hashes for exclusion criteria
-        if any(test_spec.lookup_hash().satisfies(x) for x in excludes):
-            continue
-
-        if sigil:
-            flat_combo[0] = Spec(sigil + str(flat_combo[0]))
-
-        # Add to list of constraints
-        results.append(flat_combo)
-
-    return results
-
-
-def _sigilify(item, sigil):
-    if isinstance(item, dict):
-        if sigil:
-            item["sigil"] = sigil
-        return item
-    else:
-        return sigil + item
-
-
-class SpecListError(SpackError):
-    """Error class for all errors related to SpecList objects."""
-
-
-class UndefinedReferenceError(SpecListError):
-    """Error class for undefined references in Spack stacks."""
-
-
-class InvalidSpecConstraintError(SpecListError):
-    """Error class for invalid spec constraints at concretize time."""
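
The core of the deleted module is `_expand_matrix_constraints`: each row of a `matrix:` entry contributes one constraint per element of the Cartesian product of the rows, and `exclude:` entries filter combinations afterwards. A runnable toy with plain strings standing in for `Spec` objects and substring matching standing in for real spec satisfaction:

    import itertools

    matrix_config = {
        "matrix": [["zlib", "hdf5"], ["+shared", "~shared"]],
        "exclude": ["hdf5~shared"],
    }

    rows = [[[cell] for cell in row] for row in matrix_config["matrix"]]
    results = []
    for combo in itertools.product(*rows):
        flat = [c for constraints in combo for c in constraints]
        joined = "".join(flat)  # toy "combined spec" to test against excludes
        if any(excl in joined for excl in matrix_config.get("exclude", [])):
            continue
        results.append(flat)

    print(results)
    # [['zlib', '+shared'], ['zlib', '~shared'], ['hdf5', '+shared']]
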
@@ -62,7 +62,7 @@
 import sys
 import traceback
 import warnings
-from typing import Iterator, List, Optional, Tuple
+from typing import Iterator, List, Optional, Tuple, Union

 from llnl.util.tty import color

@@ -369,7 +369,7 @@ def raise_parsing_error(string: str, cause: Optional[Exception] = None):
             """Raise a spec parsing error with token context."""
             raise SpecParsingError(string, self.ctx.current_token, self.literal_str) from cause

-        def add_flag(name: str, value: str, propagate: bool, concrete: bool):
+        def add_flag(name: str, value: Union[str, bool], propagate: bool, concrete: bool):
             """Wrapper around ``Spec._add_flag()`` that adds parser context to errors raised."""
             try:
                 initial_spec._add_flag(name, value, propagate, concrete)
@@ -93,6 +93,7 @@ def test_package_audits(packages, expected_errors, mock_packages):
 ]


+# TODO/RepoSplit: Should this not rely on mock packages post split?
 @pytest.mark.parametrize(
     "config_section,data,failing_check",
     [
@@ -113,7 +114,7 @@ def test_package_audits(packages, expected_errors, mock_packages):
         ),
     ],
 )
-def test_config_audits(config_section, data, failing_check):
+def test_config_audits(config_section, data, failing_check, mock_packages):
     with spack.config.override(config_section, data):
         reports = spack.audit.run_group("configs")
         assert any((check == failing_check) and errors for check, errors in reports)
@@ -15,7 +15,7 @@

 @pytest.fixture()
 def builder_test_repository(config):
-    builder_test_path = os.path.join(spack.paths.repos_path, "builder.test")
+    builder_test_path = os.path.join(spack.paths.test_repos_path, "builder.test")
     with spack.repo.use_repositories(builder_test_path) as mock_repo:
         yield mock_repo

@@ -873,10 +873,6 @@ def test_push_to_build_cache(
     ci.copy_stage_logs_to_artifacts(concrete_spec, str(logs_dir))
     assert "spack-build-out.txt.gz" in os.listdir(logs_dir)

-    dl_dir = scratch / "download_dir"
-    buildcache_cmd("download", "--spec-file", json_path, "--path", str(dl_dir))
-    assert len(os.listdir(dl_dir)) == 2
-

 def test_push_to_build_cache_exceptions(monkeypatch, tmp_path, capsys):
     def push_or_raise(*args, **kwargs):
@@ -2036,7 +2032,7 @@ def test_ci_verify_versions_valid(
     repo, _, commits = mock_git_package_changes
     spack.repo.PATH.put_first(repo)

-    monkeypatch.setattr(spack.repo, "packages_path", mock_packages_path(repo.packages_path))
+    monkeypatch.setattr(spack.repo, "builtin_repo", lambda: repo)

     out = ci_cmd("verify-versions", commits[-1], commits[-3])
     assert "Validated diff-test@2.1.5" in out
@@ -2053,7 +2049,7 @@ def test_ci_verify_versions_standard_invalid(
     repo, _, commits = mock_git_package_changes
     spack.repo.PATH.put_first(repo)

-    monkeypatch.setattr(spack.repo, "packages_path", mock_packages_path(repo.packages_path))
+    monkeypatch.setattr(spack.repo, "builtin_repo", lambda: repo)

     out = ci_cmd("verify-versions", commits[-1], commits[-3], fail_on_error=False)
     assert "Invalid checksum found diff-test@2.1.5" in out
@@ -2064,7 +2060,7 @@ def test_ci_verify_versions_manual_package(monkeypatch, mock_packages, mock_git_package_changes):
     repo, _, commits = mock_git_package_changes
     spack.repo.PATH.put_first(repo)

-    monkeypatch.setattr(spack.repo, "packages_path", mock_packages_path(repo.packages_path))
+    monkeypatch.setattr(spack.repo, "builtin_repo", lambda: repo)

     pkg_class = spack.spec.Spec("diff-test").package_class
     monkeypatch.setattr(pkg_class, "manual_download", True)
@@ -20,6 +20,8 @@
 config = spack.main.SpackCommand("config")
 env = spack.main.SpackCommand("env")

+pytestmark = pytest.mark.usefixtures("mock_packages")
+

 def _create_config(scope=None, data={}, section="packages"):
     scope = scope or spack.config.default_modify_scope()
@@ -1829,7 +1829,7 @@ def test_indirect_build_dep(tmp_path):
     build-only dep. Make sure this concrete DAG is preserved when writing the
     environment out and reading it back.
     """
-    builder = spack.repo.MockRepositoryBuilder(tmp_path / "repo")
+    builder = spack.repo.MockRepositoryBuilder(tmp_path)
     builder.add_package("z")
     builder.add_package("y", dependencies=[("z", "build", None)])
     builder.add_package("x", dependencies=[("y", None, None)])
@@ -1862,7 +1862,7 @@ def test_store_different_build_deps(tmp_path):
       z1

     """
-    builder = spack.repo.MockRepositoryBuilder(tmp_path / "mirror")
+    builder = spack.repo.MockRepositoryBuilder(tmp_path)
     builder.add_package("z")
     builder.add_package("y", dependencies=[("z", "build", None)])
     builder.add_package("x", dependencies=[("y", None, None), ("z", "build", None)])
@@ -9,6 +9,8 @@
 import spack.cmd.info
 from spack.main import SpackCommand

+pytestmark = [pytest.mark.usefixtures("mock_packages")]
+
 info = SpackCommand("info")


@@ -31,15 +33,12 @@ def _print(*args, **kwargs):
     return buffer


-@pytest.mark.parametrize(
-    "pkg", ["openmpi", "trilinos", "boost", "python", "dealii", "xsdk", "gasnet", "warpx"]
-)
 @pytest.mark.parametrize("extra_args", [[], ["--variants-by-name"]])
-def test_it_just_runs(pkg, extra_args):
-    info(pkg, *extra_args)
+def test_it_just_runs(extra_args):
+    info("vtk-m", *extra_args)


-def test_info_noversion(mock_packages, print_buffer):
+def test_info_noversion(print_buffer):
     """Check that a mock package with no versions outputs None."""
     info("noversion")

@@ -58,7 +57,7 @@ def test_info_noversion(mock_packages, print_buffer):
 @pytest.mark.parametrize(
     "pkg_query,expected", [("zlib", "False"), ("find-externals1", "True (version)")]
 )
-def test_is_externally_detectable(mock_packages, pkg_query, expected, parser, print_buffer):
+def test_is_externally_detectable(pkg_query, expected, parser, print_buffer):
     args = parser.parse_args(["--detectable", pkg_query])
     spack.cmd.info.info(parser, args)

@@ -70,13 +69,7 @@ def test_is_externally_detectable(mock_packages, pkg_query, expected, parser, print_buffer):


 @pytest.mark.parametrize(
-    "pkg_query",
-    [
-        "hdf5",
-        "cloverleaf3d",
-        "trilinos",
-        "gcc",  # This should ensure --test's c_names processing loop covered
-    ],
+    "pkg_query", ["vtk-m", "gcc"]  # This should ensure --test's c_names processing loop covered
 )
 @pytest.mark.parametrize("extra_args", [[], ["--variants-by-name"]])
 def test_info_fields(pkg_query, extra_args, parser, print_buffer):
@@ -6,16 +6,20 @@
 import sys
 from textwrap import dedent

 import pytest

 import spack.paths
 import spack.repo
 from spack.main import SpackCommand

+pytestmark = [pytest.mark.usefixtures("mock_packages")]
+
 list = SpackCommand("list")


 def test_list():
     output = list()
-    assert "cloverleaf3d" in output
+    assert "bzip2" in output
     assert "hdf5" in output


@@ -41,7 +45,7 @@ def test_list_cli_output_format(mock_tty_stdout):
     assert out == out_str


-def test_list_filter(mock_packages):
+def test_list_filter():
     output = list("py-*")
     assert "py-extension1" in output
     assert "py-extension2" in output
@@ -57,18 +61,18 @@ def test_list_filter(mock_packages):
     assert "mpich" not in output


-def test_list_search_description(mock_packages):
+def test_list_search_description():
     output = list("--search-description", "one build dependency")
     assert "depb" in output


-def test_list_format_name_only(mock_packages):
+def test_list_format_name_only():
     output = list("--format", "name_only")
     assert "zmpi" in output
     assert "hdf5" in output


-def test_list_format_version_json(mock_packages):
+def test_list_format_version_json():
     output = list("--format", "version_json")
     assert '{"name": "zmpi",' in output
     assert '{"name": "dyninst",' in output
@@ -77,7 +81,7 @@ def test_list_format_version_json(mock_packages):
     json.loads(output)


-def test_list_format_html(mock_packages):
+def test_list_format_html():
     output = list("--format", "html")
     assert '<div class="section" id="zmpi">' in output
     assert "<h1>zmpi" in output
@@ -86,7 +90,7 @@ def test_list_format_html(mock_packages):
     assert "<h1>hdf5" in output


-def test_list_update(tmpdir, mock_packages):
+def test_list_update(tmpdir):
     update_file = tmpdir.join("output")

     # not yet created when list is run
@@ -113,7 +117,7 @@ def test_list_update(tmpdir, mock_packages):
     assert f.read() == "empty\n"


-def test_list_tags(mock_packages):
+def test_list_tags():
     output = list("--tag", "tag1")
     assert "mpich" in output
     assert "mpich2" in output
@@ -127,7 +131,7 @@ def test_list_tags(mock_packages):
     assert "mpich2" in output


-def test_list_count(mock_packages):
+def test_list_count():
     output = list("--count")
     assert int(output.strip()) == len(spack.repo.all_package_names())

@@ -137,11 +141,10 @@ def test_list_count(mock_packages):
 )


-# def test_list_repos(mock_packages, builder_test_repository):
+def test_list_repos():
     with spack.repo.use_repositories(
-        os.path.join(spack.paths.repos_path, "builtin.mock"),
-        os.path.join(spack.paths.repos_path, "builder.test"),
+        os.path.join(spack.paths.test_repos_path, "builtin.mock"),
+        os.path.join(spack.paths.test_repos_path, "builder.test"),
     ):
         total_pkgs = len(list().strip().split())
         mock_pkgs = len(list("-r", "builtin.mock").strip().split())
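
Several of these test modules trade per-test `mock_packages` arguments for a single module-level `pytestmark`, which applies the fixture to every test in the file; the shape of that pattern:

    import pytest

    pytestmark = [pytest.mark.usefixtures("mock_packages")]

    def test_example():
        # 'mock_packages' is active here without appearing in the signature
        ...
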
@@ -111,12 +111,12 @@ def split(output):
 pkg = spack.main.SpackCommand("pkg")


-def test_packages_path():
-    assert spack.repo.packages_path() == spack.repo.PATH.get_repo("builtin").packages_path
+def test_builtin_repo():
+    assert spack.repo.builtin_repo() is spack.repo.PATH.get_repo("builtin")


-def test_mock_packages_path(mock_packages):
-    assert spack.repo.packages_path() == spack.repo.PATH.get_repo("builtin.mock").packages_path
+def test_mock_builtin_repo(mock_packages):
+    assert spack.repo.builtin_repo() is spack.repo.PATH.get_repo("builtin.mock")


 def test_pkg_add(git, mock_pkg_git_repo):
@@ -2,6 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
+import pathlib

 import pytest

@@ -22,19 +23,19 @@ def test_help_option():
     assert repo.returncode in (None, 0)


-def test_create_add_list_remove(mutable_config, tmpdir):
+def test_create_add_list_remove(mutable_config, tmp_path: pathlib.Path):
     # Create a new repository and check that the expected
     # files are there
-    repo("create", str(tmpdir), "mockrepo")
-    assert os.path.exists(os.path.join(str(tmpdir), "repo.yaml"))
+    repo("create", str(tmp_path), "mockrepo")
+    assert (tmp_path / "spack_repo" / "mockrepo" / "repo.yaml").exists()

     # Add the new repository and check it appears in the list output
-    repo("add", "--scope=site", str(tmpdir))
+    repo("add", "--scope=site", str(tmp_path / "spack_repo" / "mockrepo"))
     output = repo("list", "--scope=site", output=str)
     assert "mockrepo" in output

     # Then remove it and check it's not there
-    repo("remove", "--scope=site", str(tmpdir))
+    repo("remove", "--scope=site", str(tmp_path / "spack_repo" / "mockrepo"))
     output = repo("list", "--scope=site", output=str)
     assert "mockrepo" not in output

@@ -116,7 +116,7 @@ def test_changed_no_base(git, tmpdir, capfd):
     assert "This repository does not have a 'foobar'" in err


-def test_changed_files_all_files():
+def test_changed_files_all_files(mock_packages):
     # it's hard to guarantee "all files", so do some sanity checks.
     files = set(
         [
@@ -159,8 +159,12 @@ def test_bad_root(tmpdir):

 def test_style_is_package(tmpdir):
     """Ensure the is_package() function works."""
-    assert spack.cmd.style.is_package("var/spack/repos/builtin/packages/hdf5/package.py")
-    assert spack.cmd.style.is_package("var/spack/repos/builtin/packages/zlib/package.py")
+    assert spack.cmd.style.is_package(
+        "var/spack/repos/spack_repo/builtin/packages/hdf5/package.py"
+    )
+    assert spack.cmd.style.is_package(
+        "var/spack/repos/spack_repo/builtin/packages/zlib/package.py"
+    )
     assert not spack.cmd.style.is_package("lib/spack/spack/spec.py")
     assert not spack.cmd.style.is_package("lib/spack/external/pytest.py")

@@ -10,6 +10,9 @@
 versions = SpackCommand("versions")


+pytestmark = [pytest.mark.usefixtures("mock_packages")]
+
+
 def test_safe_versions():
     """Only test the safe versions of a package."""

@@ -70,11 +73,11 @@ def test_no_unchecksummed_versions():
 def test_versions_no_url():
     """Test a package with versions but without a ``url`` attribute."""

-    versions("graphviz")
+    versions("attributes-foo-app")


 @pytest.mark.maybeslow
 def test_no_versions_no_url():
     """Test a package without versions or a ``url`` attribute."""

-    versions("opengl")
+    versions("no-url-or-version")
@@ -29,7 +29,7 @@ def _concretize_with_reuse(*, root_str, reused_str):

 @pytest.fixture
 def runtime_repo(mutable_config):
-    repo = os.path.join(spack.paths.repos_path, "compiler_runtime.test")
+    repo = os.path.join(spack.paths.test_repos_path, "compiler_runtime.test")
     with spack.repo.use_repositories(repo) as mock_repo:
         yield mock_repo

@@ -1719,7 +1719,7 @@ def test_reuse_with_unknown_namespace_dont_raise(

     @pytest.mark.regression("45538")
     def test_reuse_from_other_namespace_no_raise(self, tmpdir, temporary_store, monkeypatch):
-        myrepo = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"), namespace="myrepo")
+        myrepo = spack.repo.MockRepositoryBuilder(tmpdir, namespace="mock_repo")
         myrepo.add_package("zlib")

         builtin = spack.concretize.concretize_one("zlib")
@@ -1727,21 +1727,19 @@ def test_reuse_from_other_namespace_no_raise(self, tmpdir, temporary_store, monkeypatch):

         with spack.repo.use_repositories(myrepo.root, override=False):
             with spack.config.override("concretizer:reuse", True):
-                myrepo = spack.concretize.concretize_one("myrepo.zlib")
+                myrepo = spack.concretize.concretize_one("mock_repo.zlib")

-        assert myrepo.namespace == "myrepo"
+        assert myrepo.namespace == "mock_repo"

     @pytest.mark.regression("28259")
     def test_reuse_with_unknown_package_dont_raise(self, tmpdir, temporary_store, monkeypatch):
-        builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"), namespace="myrepo")
+        builder = spack.repo.MockRepositoryBuilder(str(tmpdir), namespace="myrepo")
         builder.add_package("pkg-c")
         with spack.repo.use_repositories(builder.root, override=False):
             s = spack.concretize.concretize_one("pkg-c")
             assert s.namespace == "myrepo"
             PackageInstaller([s.package], fake=True, explicit=True).install()

-            del sys.modules["spack.pkg.myrepo.pkg-c"]
-            del sys.modules["spack.pkg.myrepo"]
+            del sys.modules["spack_repo.myrepo.packages.pkg_c"]
             builder.remove("pkg-c")
         with spack.repo.use_repositories(builder.root, override=False) as repos:
             # TODO (INJECT CONFIGURATION): unclear why the cache needs to be invalidated explicitly
@@ -1831,10 +1829,7 @@ def test_solve_in_rounds_all_unsolved(self, monkeypatch, mock_packages):
         monkeypatch.setattr(spack.solver.asp.Result, "unsolved_specs", simulate_unsolved_property)
         monkeypatch.setattr(spack.solver.asp.Result, "specs", list())

-        with pytest.raises(
-            spack.solver.asp.InternalConcretizerError,
-            match="a subset of input specs could not be solved for",
-        ):
+        with pytest.raises(spack.solver.asp.OutputDoesNotSatisfyInputError):
             list(solver.solve_in_rounds(specs))

     def test_coconcretize_reuse_and_virtuals(self):
@@ -2337,7 +2332,7 @@ def test_select_lower_priority_package_from_repository_stack(
     from cli.
     """
     # 'builtin.mock" and "duplicates.test" share a 'gmake' package
-    additional_repo = os.path.join(spack.paths.repos_path, "duplicates.test")
+    additional_repo = os.path.join(spack.paths.test_repos_path, "duplicates.test")
     with spack.repo.use_repositories(additional_repo, override=False):
         s = spack.concretize.concretize_one(spec_str)

@@ -2581,7 +2576,7 @@ def test_correct_external_is_selected_from_packages_yaml(self, mutable_config):

 @pytest.fixture()
 def duplicates_test_repository():
-    repository_path = os.path.join(spack.paths.repos_path, "duplicates.test")
+    repository_path = os.path.join(spack.paths.test_repos_path, "duplicates.test")
     with spack.repo.use_repositories(repository_path) as mock_repo:
         yield mock_repo

@@ -2816,7 +2811,7 @@ def test_adding_specs(self, input_specs, default_mock_concretization):

 @pytest.fixture()
 def edges_test_repository():
-    repository_path = os.path.join(spack.paths.repos_path, "edges.test")
+    repository_path = os.path.join(spack.paths.test_repos_path, "edges.test")
     with spack.repo.use_repositories(repository_path) as mock_repo:
         yield mock_repo

@@ -3110,7 +3105,9 @@ def test_spec_unification(unify, mutable_config, mock_packages):
     _ = spack.cmd.parse_specs([a_restricted, b], concretize=True)


-def test_concretization_cache_roundtrip(use_concretization_cache, monkeypatch, mutable_config):
+def test_concretization_cache_roundtrip(
+    mock_packages, use_concretization_cache, monkeypatch, mutable_config
+):
     """Tests whether we can write the results of a clingo solve to the cache
     and load the same spec request from the cache to produce identical specs"""
     # Force determinism:
@@ -3336,3 +3333,110 @@ def test_specifying_compilers_with_virtuals_syntax(default_mock_concretization):
     assert mpich["fortran"].satisfies("gcc")
     assert mpich["c"].satisfies("llvm")
     assert mpich["cxx"].satisfies("llvm")
+
+
+@pytest.mark.regression("49847")
+@pytest.mark.xfail(sys.platform == "win32", reason="issues with install mockery")
+def test_reuse_when_input_specifies_build_dep(install_mockery, do_not_check_runtimes_on_reuse):
+    """Test that we can reuse a spec when specifying build dependencies in the input"""
+    pkgb_old = spack.concretize.concretize_one(spack.spec.Spec("pkg-b@0.9 %gcc@9"))
+    PackageInstaller([pkgb_old.package], fake=True, explicit=True).install()
+
+    with spack.config.override("concretizer:reuse", True):
+        result = spack.concretize.concretize_one("pkg-b %gcc")
+        assert pkgb_old.dag_hash() == result.dag_hash()
+
+        result = spack.concretize.concretize_one("pkg-a ^pkg-b %gcc@9")
+        assert pkgb_old.dag_hash() == result["pkg-b"].dag_hash()
+        assert result.satisfies("%gcc@9")
+
+        result = spack.concretize.concretize_one("pkg-a %gcc@10 ^pkg-b %gcc@9")
+        assert pkgb_old.dag_hash() == result["pkg-b"].dag_hash()
+
+
+@pytest.mark.regression("49847")
+def test_reuse_when_requiring_build_dep(
+    install_mockery, do_not_check_runtimes_on_reuse, mutable_config
+):
+    """Test that we can reuse a spec when specifying build dependencies in requirements"""
+    mutable_config.set("packages:all:require", "%gcc")
+    pkgb_old = spack.concretize.concretize_one(spack.spec.Spec("pkg-b@0.9"))
+    PackageInstaller([pkgb_old.package], fake=True, explicit=True).install()
+
+    with spack.config.override("concretizer:reuse", True):
+        result = spack.concretize.concretize_one("pkg-b")
+        assert pkgb_old.dag_hash() == result.dag_hash(), result.tree()
+
+
+@pytest.mark.regression("50167")
+def test_input_analysis_and_conditional_requirements(default_mock_concretization):
+    """Tests that input analysis doesn't account for conditional requirement
+    to discard possible dependencies.
+
+    If the requirement is conditional, and impossible to achieve on the current
+    platform, the valid search space is still the complement of the condition that
+    activates the requirement.
+    """
+    libceed = default_mock_concretization("libceed")
+    assert libceed["libxsmm"].satisfies("@main")
+    assert libceed["libxsmm"].satisfies("platform=test")
+
+
+@pytest.mark.parametrize(
+    "compiler_str,expected,not_expected",
+    [
+        # Compiler queries are as specific as the constraint on the external
+        ("gcc@10", ["%gcc", "%gcc@10"], ["%clang", "%gcc@9"]),
+        ("gcc", ["%gcc"], ["%clang", "%gcc@9", "%gcc@10"]),
+    ],
+)
+@pytest.mark.regression("49841")
+def test_installing_external_with_compilers_directly(
+    compiler_str, expected, not_expected, mutable_config, mock_packages, tmp_path
+):
+    """Tests that version constraints are taken into account for compiler annotations
+    on externals
+    """
+    spec_str = f"libelf@0.8.12 %{compiler_str}"
+    packages_yaml = syaml.load_config(
+        f"""
+    packages:
+      libelf:
+        buildable: false
+        externals:
+        - spec: {spec_str}
+          prefix: {tmp_path / 'libelf'}
+    """
+    )
+    mutable_config.set("packages", packages_yaml["packages"])
+    s = spack.concretize.concretize_one(spec_str)
+
+    assert s.external
+    assert all(s.satisfies(c) for c in expected)
+    assert all(not s.satisfies(c) for c in not_expected)
+
+
+@pytest.mark.regression("49841")
+def test_using_externals_with_compilers(mutable_config, mock_packages, tmp_path):
+    """Tests that version constraints are taken into account for compiler annotations
+    on externals, even imposed as transitive deps.
+    """
+    packages_yaml = syaml.load_config(
+        f"""
+    packages:
+      libelf:
+        buildable: false
+        externals:
+        - spec: libelf@0.8.12 %gcc@10
+          prefix: {tmp_path / 'libelf'}
+    """
+    )
+    mutable_config.set("packages", packages_yaml["packages"])
+
+    with pytest.raises(spack.error.SpackError):
+        spack.concretize.concretize_one("dyninst%gcc@10.2.1 ^libelf@0.8.12 %gcc@:9")
+
+    s = spack.concretize.concretize_one("dyninst%gcc@10.2.1 ^libelf@0.8.12 %gcc@10:")
+
+    libelf = s["libelf"]
+    assert libelf.external and libelf.satisfies("%gcc")
Some files were not shown because too many files have changed in this diff.