Compare commits: `packages/m...ci/find-bo`

247 commits
Commit SHAs:

```text
56ca5b253a 09c6243d45 859745f1a9 ddabb8b12c 16bba32124 7d87369ead 7723bd28ed 43f3a35150
ae9f2d4d40 5a3814ff15 946c539dbd 0037462f9e e5edac4d0c 3e1474dbbb 0f502bb6c3 1eecbd3208
6e92b9180c ac9012da0c e3cb4f09f0 2e8600bb71 d946c37cbb 47a9f0bdf7 2bf900a893 99bba0b1ce
a8506f9022 4a40a76291 fe9ddf22fc 1cae1299eb 8b106416c0 e2088b599e 56446685ca 47a8d875c8
56b2d250c1 abbd09b4b2 9e5fdc6614 1224a3e8cf 6c3218920f 02cc3ea005 641ab95a31 e8b76c27e4
0dbe4d54b6 1eb6977049 3f1cfdb7d7 d438d7993d aa0825d642 978c20f35a d535124500 01f61a2eba
7d5e27d5e8 d210425eef 6be07da201 02b38716bf d7bc624c61 b7cecc9726 393a2f562b 682fcec0b2
d6baae525f e1f2612581 080fc875eb 69f417b26a 80b5106611 34146c197a 209a3bf302 e8c41cdbcb
a450dd31fa 7c1a309453 78b6fa96e5 1b315a9ede 82df0e549d f5591f9068 98c08d277d facca4e2c8
764029bcd1 44cb4eca93 39888d4df6 f68ea49e54 78b5e4cdfa 26515b8871 74640987c7 d6154645c7
faed43704b 6fba31ce34 112cead00b 9e2558bd56 019058226f ac0040f67d 38f341f12d 26ad22743f
46c2b8a565 5cbb59f2b8 f29fa1cfdf c69951d6e1 f406f27d9c 36ea208e12 17e0774189 3162c2459d
7cad6c62a3 eb2ddf6fa2 2bc2902fed b362362291 32bb5c7523 a2b76c68a0 62132919e1 b06929f6df
0f33de157b 03a074ebe7 4d12b6a4fd 26bb15e1fb 1bf92c7881 eefe0b2eec de6c6f0cd9 309d3aa1ec
feff11f914 de3b324983 747cd374df 8b3ac40436 28e9be443c 1381bede80 6502785908 53257408a3
28d02dff60 9d60b42a97 9ff5a30574 9a6c013365 9f62a3e819 e380e9a0ab 8415ea9ada 6960766e0c
0c2ca8c841 273960fdbb 0cd2a1102c e40676e901 4ddb07e94f 50585d55c5 5d6b5f3f6f 2351c19489
08d49361f0 c3c63e5ca4 e72d4075bd f9f97bf22b 8033455d5f 50a5a6fea4 0de8a0e3f3 0a26e74cc8
9dfd91efbb 1a7baadbff afcfd56ae5 7eb2e704b6 564b4fa263 0a941b43ca 35ff24ddea 7019e4e3cb
cb16b8a047 381acb3726 d87ea0b256 1a757e7f70 704e2c53a8 478d8a668c 7903f9fcfd 670d3d3fdc
e8aab6b31c 1ce408ecc5 dc81a2dcdb b10f51f020 4f4e3f5607 00fb80e766 057603cad8 5b8b6e492d
763279cd61 e4237b9153 d288658cf0 2c22ae0576 fc3fc94689 b5013c1372 e220674c4d 7f13518225
96a13a97e6 6d244b3f67 6bc66db141 acfb2b9270 d92a2c31fb e32561aff6 4b0479159f 03bfd36926
4d30c8dce4 49d4104f22 07fb83b493 263007ba81 3b6e99381f a30af1ac54 294742ab7b 6391559fb6
d4d4f813a9 4667163dc4 439f105285 f65b1fd7b6 d23e06c27e b76e9a887b 55ffd439ce d8a7b88e7b
aaa1bb1d98 0d94b8044b 5a52780f7c dd0a8452ee c467bba73e d680a0cb99 efadee26ef 2077b3a006
8e0c659b51 863ab5a597 db4e76ab27 6728a46a84 5a09459dd5 7e14ff806a 7e88cf795c 1536e3d422
1fe8e63481 dfca2c285e 2686f778fa 925e9c73b1 aba447e885 1113de0dad 4110225166 24c839c837
42c6a6b189 b0ea1c6f24 735102eb2b 2e3cdb349b 05c8030119 bbcd4224fa 4c0cdb99b3 f22d009c6d
c5a3e36ad0 1c76ba1c3e b969f739bd 4788c4774c 34de028dbc a69254fd79 af5f205759 77f9100a59
386bb71392 0676d6457f 0b80e36867 4c9816f10c fb6741cf85 3f2fa256fc d5c8864942
```
**`.github/workflows/audit.yaml`** (36 changes, vendored)

```diff
@@ -17,10 +17,16 @@ concurrency:
 jobs:
   # Run audits on all the packages in the built-in repository
   package-audits:
-    runs-on: ${{ matrix.operating_system }}
+    runs-on: ${{ matrix.system.os }}
     strategy:
       matrix:
-        operating_system: ["ubuntu-latest", "macos-latest"]
+        system:
+          - { os: windows-latest, shell: 'powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}' }
+          - { os: ubuntu-latest, shell: bash }
+          - { os: macos-latest, shell: bash }
+    defaults:
+      run:
+        shell: ${{ matrix.system.shell }}
     steps:
     - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
     - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -29,21 +35,33 @@ jobs:
     - name: Install Python packages
       run: |
         pip install --upgrade pip setuptools pytest coverage[toml]
+    - name: Setup for Windows run
+      if: runner.os == 'Windows'
+      run: |
+        python -m pip install --upgrade pywin32
     - name: Package audits (with coverage)
-      if: ${{ inputs.with_coverage == 'true' }}
+      if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
       run: |
         . share/spack/setup-env.sh
         coverage run $(which spack) audit packages
-        coverage run $(which spack) audit externals
+        coverage run $(which spack) -d audit externals
         coverage combine
         coverage xml
     - name: Package audits (without coverage)
-      if: ${{ inputs.with_coverage == 'false' }}
+      if: ${{ inputs.with_coverage == 'false' && runner.os != 'Windows' }}
       run: |
         . share/spack/setup-env.sh
-        $(which spack) audit packages
-        $(which spack) audit externals
-    - uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
+        spack -d audit packages
+        spack -d audit externals
+    - name: Package audits (without coverage)
+      if: ${{ runner.os == 'Windows' }}
+      run: |
+        . share/spack/setup-env.sh
+        spack -d audit packages
+        ./share/spack/qa/validate_last_exit.ps1
+        spack -d audit externals
+        ./share/spack/qa/validate_last_exit.ps1
+    - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
       if: ${{ inputs.with_coverage == 'true' }}
       with:
         flags: unittests,audits
```
**`.github/workflows/build-containers.yml`** (6 changes, vendored)

```diff
@@ -50,7 +50,9 @@ jobs:
           [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
           [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
           [fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
-          [fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38']]
+          [fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38'],
+          [fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
+          [fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
     name: Build ${{ matrix.dockerfile[0] }}
     if: github.repository == 'spack/spack'
     steps:
@@ -96,7 +98,7 @@ jobs:
         uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@2b51285047da1547ffb1b2203d8be4c0af6b1f20
+        uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb

       - name: Log in to GitHub Container Registry
         uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
```
**`.github/workflows/style/requirements.txt`** (2 changes, vendored)

```diff
@@ -1,4 +1,4 @@
-black==24.3.0
+black==24.4.0
 clingo==5.7.1
 flake8==7.0.0
 isort==5.13.2
```
**`.github/workflows/unit_tests.yaml`** (8 changes, vendored)

```diff
@@ -91,7 +91,7 @@ jobs:
           UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
         run: |
           share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
+      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
         with:
           flags: unittests,linux,${{ matrix.concretizer }}
           token: ${{ secrets.CODECOV_TOKEN }}
@@ -124,7 +124,7 @@ jobs:
           COVERAGE: true
         run: |
           share/spack/qa/run-shell-tests
-      - uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
+      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
         with:
           flags: shelltests,linux
           token: ${{ secrets.CODECOV_TOKEN }}
@@ -185,7 +185,7 @@ jobs:
           SPACK_TEST_SOLVER: clingo
         run: |
           share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
+      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
         with:
           flags: unittests,linux,clingo
           token: ${{ secrets.CODECOV_TOKEN }}
@@ -223,7 +223,7 @@ jobs:
           $(which spack) solve zlib
           common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
           $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-      - uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
+      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
         with:
           flags: unittests,macos
           token: ${{ secrets.CODECOV_TOKEN }}
```
**`.github/workflows/windows_python.yml`** (4 changes, vendored)

```diff
@@ -33,7 +33,7 @@ jobs:
         ./share/spack/qa/validate_last_exit.ps1
         coverage combine -a
         coverage xml
-    - uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
+    - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
       with:
         flags: unittests,windows
         token: ${{ secrets.CODECOV_TOKEN }}
@@ -59,7 +59,7 @@ jobs:
         ./share/spack/qa/validate_last_exit.ps1
         coverage combine -a
         coverage xml
-    - uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
+    - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
       with:
         flags: unittests,windows
         token: ${{ secrets.CODECOV_TOKEN }}
```
```diff
@@ -88,7 +88,7 @@ Resources:
   [bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
 * [**Github Discussions**](https://github.com/spack/spack/discussions):
   for Q&A and discussions. Note the pinned discussions for announcements.
-* **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
+* **X**: [@spackpm](https://twitter.com/spackpm). Be sure to
   `@mention` us!
 * **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack):
   only for announcements. Please use other venues for discussions.
```
```diff
@@ -15,7 +15,7 @@ concretizer:
   # as possible, rather than building. If `false`, we'll always give you a fresh
   # concretization. If `dependencies`, we'll only reuse dependencies but
   # give you a fresh concretization for your root specs.
-  reuse: dependencies
+  reuse: true
   # Options that tune which targets are considered for concretization. The
   # concretization process is very sensitive to the number targets, and the time
   # needed to reach a solution increases noticeably with the number of targets
```
```diff
@@ -19,7 +19,6 @@ packages:
     - apple-clang
     - clang
    - gcc
-    - intel
   providers:
     elf: [libelf]
     fuse: [macfuse]
```
```diff
@@ -15,7 +15,7 @@
 # -------------------------------------------------------------------------
 packages:
   all:
-    compiler: [gcc, intel, pgi, clang, xl, nag, fj, aocc]
+    compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
     providers:
       awk: [gawk]
       blas: [openblas, amdblis]
@@ -35,6 +35,7 @@ packages:
       java: [openjdk, jdk, ibm-java]
       jpeg: [libjpeg-turbo, libjpeg]
       lapack: [openblas, amdlibflame]
+      libc: [glibc, musl]
       libgfortran: [ gcc-runtime ]
       libglx: [mesa+glx, mesa18+glx]
       libifcore: [ intel-oneapi-runtime ]
```
```diff
@@ -220,6 +220,40 @@ section of the configuration:

 .. _binary_caches_oci:

+---------------------------------
+Automatic push to a build cache
+---------------------------------
+
+Sometimes it is convenient to push packages to a build cache as soon as they are installed. Spack can do this by setting autopush flag when adding a mirror:
+
+.. code-block:: console
+
+    $ spack mirror add --autopush <name> <url or path>
+
+Or the autopush flag can be set for an existing mirror:
+
+.. code-block:: console
+
+    $ spack mirror set --autopush <name>  # enable automatic push for an existing mirror
+    $ spack mirror set --no-autopush <name>  # disable automatic push for an existing mirror
+
+Then after installing a package it is automatically pushed to all mirrors with ``autopush: true``. The command
+
+.. code-block:: console
+
+    $ spack install <package>
+
+will have the same effect as
+
+.. code-block:: console
+
+    $ spack install <package>
+    $ spack buildcache push <cache> <package>  # for all caches with autopush: true
+
+.. note::
+
+    Packages are automatically pushed to a build cache only if they are built from source.
+
 -----------------------------------------
 OCI / Docker V2 registries as build cache
 -----------------------------------------
```
```diff
@@ -233,6 +233,12 @@ The OS that are currently supported are summarized in the table below:
    * - Fedora Linux 38
      - ``fedora:38``
      - ``spack/fedora38``
+   * - Fedora Linux 39
+     - ``fedora:39``
+     - ``spack/fedora39``
+   * - Fedora Linux 40
+     - ``fedora:40``
+     - ``spack/fedora40``
```
```diff
@@ -552,11 +552,11 @@ With either interpreter you can run a single command:

 .. code-block:: console

-   $ spack python -c 'import distro; distro.linux_distribution()'
-   ('Ubuntu', '18.04', 'Bionic Beaver')
+   $ spack python -c 'from spack.spec import Spec; Spec("python").concretized()'
+   ...

-   $ spack python -i ipython -c 'import distro; distro.linux_distribution()'
-   Out[1]: ('Ubuntu', '18.04', 'Bionic Beaver')
+   $ spack python -i ipython -c 'from spack.spec import Spec; Spec("python").concretized()'
+   Out[1]: ...

 or a file:
```
```diff
@@ -1071,9 +1071,9 @@ Announcing a release

 We announce releases in all of the major Spack communication channels.
 Publishing the release takes care of GitHub. The remaining channels are
-Twitter, Slack, and the mailing list. Here are the steps:
+X, Slack, and the mailing list. Here are the steps:

-#. Announce the release on Twitter.
+#. Announce the release on X.

    * Compose the tweet on the ``@spackpm`` account per the
      ``spack-twitter`` slack channel.
```
```diff
@@ -6435,9 +6435,12 @@ the ``paths`` attribute:
           echo "Target: x86_64-pc-linux-gnu"
           echo "Thread model: posix"
           echo "InstalledDir: /usr/bin"
+    platforms: ["linux", "darwin"]
     results:
     - spec: 'llvm@3.9.1 +clang~lld~lldb'

+If the ``platforms`` attribute is present, tests are run only if the current host
+matches one of the listed platforms.
 Each test is performed by first creating a temporary directory structure as
 specified in the corresponding ``layout`` and by then running
 package detection and checking that the outcome matches the expected
@@ -6471,6 +6474,10 @@ package detection and checking that the outcome matches the expected
      - A spec that is expected from detection
      - Any valid spec
      - Yes
+   * - ``results:[0]:extra_attributes``
+     - Extra attributes expected on the associated Spec
+     - Nested dictionary with string as keys, and regular expressions as leaf values
+     - No

 """""""""""""""""""""""""""""""
 Reuse tests from other packages
```
```diff
@@ -2,12 +2,12 @@ sphinx==7.2.6
 sphinxcontrib-programoutput==0.17
 sphinx_design==0.5.0
 sphinx-rtd-theme==2.0.0
-python-levenshtein==0.25.0
+python-levenshtein==0.25.1
 docutils==0.20.1
 pygments==2.17.2
 urllib3==2.2.1
 pytest==8.1.1
 isort==5.13.2
-black==24.3.0
+black==24.4.0
 flake8==7.0.0
 mypy==1.9.0
```
**`lib/spack/env/cc`** (250 changes, vendored)

```diff
@@ -47,7 +47,8 @@ SPACK_F77_RPATH_ARG
 SPACK_FC_RPATH_ARG
 SPACK_LINKER_ARG
 SPACK_SHORT_SPEC
-SPACK_SYSTEM_DIRS"
+SPACK_SYSTEM_DIRS
+SPACK_MANAGED_DIRS"

 # Optional parameters that aren't required to be set

@@ -173,22 +174,6 @@ preextend() {
     unset IFS
 }

-# system_dir PATH
-# test whether a path is a system directory
-system_dir() {
-    IFS=':'  # SPACK_SYSTEM_DIRS is colon-separated
-    path="$1"
-    for sd in $SPACK_SYSTEM_DIRS; do
-        if [ "${path}" = "${sd}" ] || [ "${path}" = "${sd}/" ]; then
-            # success if path starts with a system prefix
-            unset IFS
-            return 0
-        fi
-    done
-    unset IFS
-    return 1  # fail if path starts no system prefix
-}
-
 # Fail with a clear message if the input contains any bell characters.
 if eval "[ \"\${*#*${lsep}}\" != \"\$*\" ]"; then
     die "Compiler command line contains our separator ('${lsep}'). Cannot parse."
@@ -201,6 +186,18 @@ for param in $params; do
     fi
 done

+# eval this because SPACK_MANAGED_DIRS and SPACK_SYSTEM_DIRS are inputs we don't wanna loop over.
+# moving the eval inside the function would eval it every call.
+eval "\
+path_order() {
+case \"\$1\" in
+    $SPACK_MANAGED_DIRS) return 0 ;;
+    $SPACK_SYSTEM_DIRS) return 2 ;;
+    /*) return 1 ;;
+esac
+}
+"
+
 # Check if optional parameters are defined
 # If we aren't asking for debug flags, don't add them
 if [ -z "${SPACK_ADD_DEBUG_FLAGS:-}" ]; then
@@ -420,11 +417,12 @@ input_command="$*"
 parse_Wl() {
     while [ $# -ne 0 ]; do
         if [ "$wl_expect_rpath" = yes ]; then
-            if system_dir "$1"; then
-                append return_system_rpath_dirs_list "$1"
-            else
-                append return_rpath_dirs_list "$1"
-            fi
+            path_order "$1"
+            case $? in
+                0) append return_spack_store_rpath_dirs_list "$1" ;;
+                1) append return_rpath_dirs_list "$1" ;;
+                2) append return_system_rpath_dirs_list "$1" ;;
+            esac
             wl_expect_rpath=no
         else
             case "$1" in
@@ -432,21 +430,25 @@ parse_Wl() {
                     arg="${1#-rpath=}"
                     if [ -z "$arg" ]; then
                         shift; continue
-                    elif system_dir "$arg"; then
-                        append return_system_rpath_dirs_list "$arg"
-                    else
-                        append return_rpath_dirs_list "$arg"
                     fi
+                    path_order "$arg"
+                    case $? in
+                        0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                        1) append return_rpath_dirs_list "$arg" ;;
+                        2) append return_system_rpath_dirs_list "$arg" ;;
+                    esac
                     ;;
                 --rpath=*)
                     arg="${1#--rpath=}"
                     if [ -z "$arg" ]; then
                         shift; continue
-                    elif system_dir "$arg"; then
-                        append return_system_rpath_dirs_list "$arg"
-                    else
-                        append return_rpath_dirs_list "$arg"
                     fi
+                    path_order "$arg"
+                    case $? in
+                        0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                        1) append return_rpath_dirs_list "$arg" ;;
+                        2) append return_system_rpath_dirs_list "$arg" ;;
+                    esac
                     ;;
                 -rpath|--rpath)
                     wl_expect_rpath=yes
@@ -473,12 +475,20 @@ categorize_arguments() {

     return_other_args_list=""
     return_isystem_was_used=""
+
+    return_isystem_spack_store_include_dirs_list=""
     return_isystem_system_include_dirs_list=""
     return_isystem_include_dirs_list=""
+
+    return_spack_store_include_dirs_list=""
     return_system_include_dirs_list=""
     return_include_dirs_list=""
+
+    return_spack_store_lib_dirs_list=""
     return_system_lib_dirs_list=""
     return_lib_dirs_list=""
+
+    return_spack_store_rpath_dirs_list=""
     return_system_rpath_dirs_list=""
     return_rpath_dirs_list=""

@@ -546,29 +556,32 @@ categorize_arguments() {
                 arg="${1#-isystem}"
                 return_isystem_was_used=true
                 if [ -z "$arg" ]; then shift; arg="$1"; fi
-                if system_dir "$arg"; then
-                    append return_isystem_system_include_dirs_list "$arg"
-                else
-                    append return_isystem_include_dirs_list "$arg"
-                fi
+                path_order "$arg"
+                case $? in
+                    0) append return_isystem_spack_store_include_dirs_list "$arg" ;;
+                    1) append return_isystem_include_dirs_list "$arg" ;;
+                    2) append return_isystem_system_include_dirs_list "$arg" ;;
+                esac
                 ;;
             -I*)
                 arg="${1#-I}"
                 if [ -z "$arg" ]; then shift; arg="$1"; fi
-                if system_dir "$arg"; then
-                    append return_system_include_dirs_list "$arg"
-                else
-                    append return_include_dirs_list "$arg"
-                fi
+                path_order "$arg"
+                case $? in
+                    0) append return_spack_store_include_dirs_list "$arg" ;;
+                    1) append return_include_dirs_list "$arg" ;;
+                    2) append return_system_include_dirs_list "$arg" ;;
+                esac
                 ;;
             -L*)
                 arg="${1#-L}"
                 if [ -z "$arg" ]; then shift; arg="$1"; fi
-                if system_dir "$arg"; then
-                    append return_system_lib_dirs_list "$arg"
-                else
-                    append return_lib_dirs_list "$arg"
-                fi
+                path_order "$arg"
+                case $? in
+                    0) append return_spack_store_lib_dirs_list "$arg" ;;
+                    1) append return_lib_dirs_list "$arg" ;;
+                    2) append return_system_lib_dirs_list "$arg" ;;
+                esac
                 ;;
             -l*)
                 # -loopopt=0 is generated erroneously in autoconf <= 2.69,
@@ -601,29 +614,32 @@ categorize_arguments() {
                     break
                 elif [ "$xlinker_expect_rpath" = yes ]; then
                     # Register the path of -Xlinker -rpath <other args> -Xlinker <path>
-                    if system_dir "$1"; then
-                        append return_system_rpath_dirs_list "$1"
-                    else
-                        append return_rpath_dirs_list "$1"
-                    fi
+                    path_order "$1"
+                    case $? in
+                        0) append return_spack_store_rpath_dirs_list "$1" ;;
+                        1) append return_rpath_dirs_list "$1" ;;
+                        2) append return_system_rpath_dirs_list "$1" ;;
+                    esac
                     xlinker_expect_rpath=no
                 else
                     case "$1" in
                         -rpath=*)
                             arg="${1#-rpath=}"
-                            if system_dir "$arg"; then
-                                append return_system_rpath_dirs_list "$arg"
-                            else
-                                append return_rpath_dirs_list "$arg"
-                            fi
+                            path_order "$arg"
+                            case $? in
+                                0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                                1) append return_rpath_dirs_list "$arg" ;;
+                                2) append return_system_rpath_dirs_list "$arg" ;;
+                            esac
                             ;;
                         --rpath=*)
                             arg="${1#--rpath=}"
-                            if system_dir "$arg"; then
-                                append return_system_rpath_dirs_list "$arg"
-                            else
-                                append return_rpath_dirs_list "$arg"
-                            fi
+                            path_order "$arg"
+                            case $? in
+                                0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                                1) append return_rpath_dirs_list "$arg" ;;
+                                2) append return_system_rpath_dirs_list "$arg" ;;
+                            esac
                             ;;
                         -rpath|--rpath)
                             xlinker_expect_rpath=yes
@@ -661,16 +677,25 @@ categorize_arguments() {
 }

 categorize_arguments "$@"
-include_dirs_list="$return_include_dirs_list"
-lib_dirs_list="$return_lib_dirs_list"
-rpath_dirs_list="$return_rpath_dirs_list"
-system_include_dirs_list="$return_system_include_dirs_list"
-system_lib_dirs_list="$return_system_lib_dirs_list"
-system_rpath_dirs_list="$return_system_rpath_dirs_list"
-isystem_was_used="$return_isystem_was_used"
-isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
-isystem_include_dirs_list="$return_isystem_include_dirs_list"
-other_args_list="$return_other_args_list"
+
+spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
+system_include_dirs_list="$return_system_include_dirs_list"
+include_dirs_list="$return_include_dirs_list"
+
+spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
+system_lib_dirs_list="$return_system_lib_dirs_list"
+lib_dirs_list="$return_lib_dirs_list"
+
+spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
+system_rpath_dirs_list="$return_system_rpath_dirs_list"
+rpath_dirs_list="$return_rpath_dirs_list"
+
+isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
+isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
+isystem_include_dirs_list="$return_isystem_include_dirs_list"
+
+isystem_was_used="$return_isystem_was_used"
+other_args_list="$return_other_args_list"

 #
 # Add flags from Spack's cppflags, cflags, cxxflags, fcflags, fflags, and
@@ -730,7 +755,7 @@ esac

 # Linker flags
 case "$mode" in
-    ld|ccld)
+    ccld)
         extend spack_flags_list SPACK_LDFLAGS
         ;;
 esac
@@ -738,16 +763,25 @@ esac
 IFS="$lsep"
     categorize_arguments $spack_flags_list
 unset IFS
-spack_flags_include_dirs_list="$return_include_dirs_list"
-spack_flags_lib_dirs_list="$return_lib_dirs_list"
-spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
-spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
-spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
-spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
-spack_flags_isystem_was_used="$return_isystem_was_used"
-spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
-spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
-spack_flags_other_args_list="$return_other_args_list"
+
+spack_flags_isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
+spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
+spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
+
+spack_flags_spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
+spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
+spack_flags_include_dirs_list="$return_include_dirs_list"
+
+spack_flags_spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
+spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
+spack_flags_lib_dirs_list="$return_lib_dirs_list"
+
+spack_flags_spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
+spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
+spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
+
+spack_flags_isystem_was_used="$return_isystem_was_used"
+spack_flags_other_args_list="$return_other_args_list"


 # On macOS insert headerpad_max_install_names linker flag
@@ -767,11 +801,13 @@ if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
         # Append RPATH directories. Note that in the case of the
         # top-level package these directories may not exist yet. For dependencies
         # it is assumed that paths have already been confirmed.
+        extend spack_store_rpath_dirs_list SPACK_STORE_RPATH_DIRS
         extend rpath_dirs_list SPACK_RPATH_DIRS
     fi
 fi

 if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
+    extend spack_store_lib_dirs_list SPACK_STORE_LINK_DIRS
     extend lib_dirs_list SPACK_LINK_DIRS
 fi
@@ -798,38 +834,50 @@ case "$mode" in
         ;;
 esac

+case "$mode" in
+    cpp|cc|as|ccld)
+        if [ "$spack_flags_isystem_was_used" = "true" ] || [ "$isystem_was_used" = "true" ]; then
+            extend isystem_spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
+            extend isystem_include_dirs_list SPACK_INCLUDE_DIRS
+        else
+            extend spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
+            extend include_dirs_list SPACK_INCLUDE_DIRS
+        fi
+        ;;
+esac
+
 #
 # Finally, reassemble the command line.
 #
 args_list="$flags_list"

-# Insert include directories just prior to any system include directories
+# Include search paths partitioned by (in store, non-sytem, system)
 # NOTE: adding ${lsep} to the prefix here turns every added element into two
-extend args_list spack_flags_include_dirs_list "-I"
-extend args_list include_dirs_list "-I"
+extend args_list spack_flags_spack_store_include_dirs_list -I
+extend args_list spack_store_include_dirs_list -I
+
+extend args_list spack_flags_include_dirs_list -I
+extend args_list include_dirs_list -I
+
+extend args_list spack_flags_isystem_spack_store_include_dirs_list "-isystem${lsep}"
+extend args_list isystem_spack_store_include_dirs_list "-isystem${lsep}"
+
 extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
 extend args_list isystem_include_dirs_list "-isystem${lsep}"

-case "$mode" in
-    cpp|cc|as|ccld)
-        if [ "$spack_flags_isystem_was_used" = "true" ]; then
-            extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
-        elif [ "$isystem_was_used" = "true" ]; then
-            extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
-        else
-            extend args_list SPACK_INCLUDE_DIRS "-I"
-        fi
-        ;;
-esac
-
 extend args_list spack_flags_system_include_dirs_list -I
 extend args_list system_include_dirs_list -I

 extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
 extend args_list isystem_system_include_dirs_list "-isystem${lsep}"

-# Library search paths
+# Library search paths partitioned by (in store, non-sytem, system)
+extend args_list spack_flags_spack_store_lib_dirs_list "-L"
+extend args_list spack_store_lib_dirs_list "-L"
+
 extend args_list spack_flags_lib_dirs_list "-L"
 extend args_list lib_dirs_list "-L"

 extend args_list spack_flags_system_lib_dirs_list "-L"
 extend args_list system_lib_dirs_list "-L"

@@ -839,8 +887,12 @@ case "$mode" in
         if [ -n "$dtags_to_add" ] ; then
             append args_list "$linker_arg$dtags_to_add"
         fi
+        extend args_list spack_flags_spack_store_rpath_dirs_list "$rpath"
+        extend args_list spack_store_rpath_dirs_list "$rpath"
+
         extend args_list spack_flags_rpath_dirs_list "$rpath"
         extend args_list rpath_dirs_list "$rpath"
+
         extend args_list spack_flags_system_rpath_dirs_list "$rpath"
         extend args_list system_rpath_dirs_list "$rpath"
         ;;
@@ -848,8 +900,12 @@ case "$mode" in
         if [ -n "$dtags_to_add" ] ; then
             append args_list "$dtags_to_add"
         fi
+        extend args_list spack_flags_spack_store_rpath_dirs_list "-rpath${lsep}"
+        extend args_list spack_store_rpath_dirs_list "-rpath${lsep}"
+
         extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
         extend args_list rpath_dirs_list "-rpath${lsep}"
+
         extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
         extend args_list system_rpath_dirs_list "-rpath${lsep}"
         ;;
```
**`lib/spack/external/_vendoring/ruamel/yaml/comments.py`** (vendored)

```diff
@@ -497,7 +497,7 @@ def copy_attributes(self, t, memo=None):
                     Tag.attrib, merge_attrib]:
             if hasattr(self, a):
                 if memo is not None:
-                    setattr(t, a, copy.deepcopy(getattr(self, a, memo)))
+                    setattr(t, a, copy.deepcopy(getattr(self, a), memo))
                 else:
                     setattr(t, a, getattr(self, a))
     # fmt: on
```
**`lib/spack/external/patches/ruamelyaml.patch`** (13 lines, vendored, new file)

```diff
diff --git a/lib/spack/external/_vendoring/ruamel/yaml/comments.py b/lib/spack/external/_vendoring/ruamel/yaml/comments.py
index 1badeda585..892c868af3 100644
--- a/lib/spack/external/_vendoring/ruamel/yaml/comments.py
+++ b/lib/spack/external/_vendoring/ruamel/yaml/comments.py
@@ -497,7 +497,7 @@ def copy_attributes(self, t, memo=None):
                     Tag.attrib, merge_attrib]:
             if hasattr(self, a):
                 if memo is not None:
-                    setattr(t, a, copy.deepcopy(getattr(self, a, memo)))
+                    setattr(t, a, copy.deepcopy(getattr(self, a), memo))
                 else:
                     setattr(t, a, getattr(self, a))
     # fmt: on
```
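The patched line is easy to misread: the broken call passes `memo` as the *default* value of `getattr`, so `copy.deepcopy` runs without its memo dictionary. A minimal sketch of the difference, using a hypothetical `Node` class rather than the vendored ruamel types:

```python
import copy

class Node:
    """Toy stand-in for a ruamel.yaml comment holder."""
    def __init__(self):
        self.attrib = {"shared": [1, 2, 3]}

src, memo = Node(), {}

# Broken: memo is silently consumed as getattr's *default* argument,
# and deepcopy runs with no memo dict at all.
broken = copy.deepcopy(getattr(src, "attrib", memo))

# Fixed: getattr takes two arguments, and memo reaches deepcopy, so
# objects reachable more than once are copied exactly once.
fixed = copy.deepcopy(getattr(src, "attrib"), memo)

assert fixed == src.attrib and fixed is not src.attrib
assert memo, "the fixed call records copied objects in the memo dict"
```

With the memo dict threaded through, shared sub-objects are copied once and cyclic structures do not recurse forever, which is the entire purpose of `deepcopy`'s second argument.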
```diff
@@ -198,15 +198,32 @@ def getuid():
         return os.getuid()


+def _win_rename(src, dst):
+    # os.replace will still fail if on Windows (but not POSIX) if the dst
+    # is a symlink to a directory (all other cases have parity Windows <-> Posix)
+    if os.path.islink(dst) and os.path.isdir(os.path.realpath(dst)):
+        if os.path.samefile(src, dst):
+            # src and dst are the same
+            # do nothing and exit early
+            return
+        # If dst exists and is a symlink to a directory
+        # we need to remove dst and then perform rename/replace
+        # this is safe to do as there's no chance src == dst now
+        os.remove(dst)
+    os.replace(src, dst)
+
+
 @system_path_filter
 def rename(src, dst):
-    # On Windows, os.rename will fail if the destination file already exists
+    # os.replace is the same as os.rename on POSIX and is MoveFileExW w/
+    # the MOVEFILE_REPLACE_EXISTING flag on Windows
+    # Windows invocation is abstracted behind additonal logic handling
+    # remaining cases of divergent behavior accross platforms
     if sys.platform == "win32":
-        # Windows path existence checks will sometimes fail on junctions/links/symlinks
-        # so check for that case
-        if os.path.exists(dst) or islink(dst):
-            os.remove(dst)
-    os.rename(src, dst)
+        _win_rename(src, dst)
+    else:
+        os.replace(src, dst)


 @system_path_filter
```
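For context on what the new `rename` standardizes: `os.replace` already overwrites an existing destination on every platform, while `os.rename` raises on Windows when the target exists, so the wrapper above only has to paper over the remaining symlink-to-directory case. A small, platform-neutral sketch of the baseline behavior (plain standard library, not the Spack helper):

```python
import os
import tempfile

# os.replace overwrites an existing destination on every platform;
# os.rename would raise FileExistsError for this case on Windows.
with tempfile.TemporaryDirectory() as tmp:
    src = os.path.join(tmp, "new.txt")
    dst = os.path.join(tmp, "old.txt")
    for path, text in ((src, "new"), (dst, "old")):
        with open(path, "w") as f:
            f.write(text)

    os.replace(src, dst)  # dst now holds "new"; src is gone

    with open(dst) as f:
        assert f.read() == "new"
```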
```diff
@@ -1217,10 +1234,12 @@ def windows_sfn(path: os.PathLike):
     import ctypes

     k32 = ctypes.WinDLL("kernel32", use_last_error=True)
+    # Method with null values returns size of short path name
+    sz = k32.GetShortPathNameW(path, None, 0)
     # stub Windows types TCHAR[LENGTH]
-    TCHAR_arr = ctypes.c_wchar * len(path)
+    TCHAR_arr = ctypes.c_wchar * sz
     ret_str = TCHAR_arr()
-    k32.GetShortPathNameW(path, ret_str, len(path))
+    k32.GetShortPathNameW(path, ctypes.byref(ret_str), sz)
     return ret_str.value
```
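The `windows_sfn` fix is an instance of the Win32 "call twice" protocol: calling `GetShortPathNameW` with a NULL buffer returns the required buffer length, including the terminating NUL, which the old code ignored by sizing the buffer from `len(path)`. A hedged, Windows-only sketch of that protocol (illustrative; not the Spack function itself):

```python
import ctypes
import sys

def short_path_name(path: str) -> str:
    """Resolve a Windows 8.3 short name using the two-call protocol."""
    assert sys.platform == "win32", "GetShortPathNameW only exists on Windows"
    k32 = ctypes.WinDLL("kernel32", use_last_error=True)
    # First call: NULL buffer; the return value is the required size
    # in characters, including the terminating NUL.
    size = k32.GetShortPathNameW(path, None, 0)
    if size == 0:
        raise ctypes.WinError(ctypes.get_last_error())
    buf = ctypes.create_unicode_buffer(size)
    # Second call: fill the correctly sized buffer.
    k32.GetShortPathNameW(path, buf, size)
    return buf.value

# e.g. short_path_name(r"C:\Program Files") may yield 'C:\\PROGRA~1'
# when short names are enabled on the volume.
```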
```diff
@@ -12,7 +12,7 @@
 import traceback
 from datetime import datetime
 from sys import platform as _platform
-from typing import NoReturn
+from typing import Any, NoReturn

 if _platform != "win32":
     import fcntl
@@ -158,21 +158,22 @@ def get_timestamp(force=False):
         return ""


-def msg(message, *args, **kwargs):
+def msg(message: Any, *args: Any, newline: bool = True) -> None:
     if not msg_enabled():
         return

     if isinstance(message, Exception):
-        message = "%s: %s" % (message.__class__.__name__, str(message))
+        message = f"{message.__class__.__name__}: {message}"
+    else:
+        message = str(message)

-    newline = kwargs.get("newline", True)
     st_text = ""
     if _stacktrace:
         st_text = process_stacktrace(2)
-    if newline:
-        cprint("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
-    else:
-        cwrite("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
+
+    nl = "\n" if newline else ""
+    cwrite(f"@*b{{{st_text}==>}} {get_timestamp()}{cescape(_output_filter(message))}{nl}")

     for arg in args:
         print(indent + _output_filter(str(arg)))
```
```diff
@@ -237,7 +237,6 @@ def transpose():
 def colified(
     elts: List[Any],
     cols: int = 0,
-    output: Optional[IO] = None,
     indent: int = 0,
     padding: int = 2,
     tty: Optional[bool] = None,
```
```diff
@@ -62,6 +62,7 @@
 import re
 import sys
 from contextlib import contextmanager
+from typing import Optional


 class ColorParseError(Exception):
@@ -95,7 +96,7 @@ def __init__(self, message):
 } # white

 # Regex to be used for color formatting
-color_re = r"@(?:@|\.|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)"
+COLOR_RE = re.compile(r"@(?:(@)|(\.)|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)")

 # Mapping from color arguments to values for tty.set_color
 color_when_values = {"always": True, "auto": None, "never": False}
@@ -203,77 +204,64 @@ def color_when(value):
     set_color_when(old_value)


-class match_to_ansi:
-    def __init__(self, color=True, enclose=False, zsh=False):
-        self.color = _color_when_value(color)
-        self.enclose = enclose
-        self.zsh = zsh
-
-    def escape(self, s):
-        """Returns a TTY escape sequence for a color"""
-        if self.color:
-            if self.zsh:
-                result = rf"\e[0;{s}m"
-            else:
-                result = f"\033[{s}m"
-
-            if self.enclose:
-                result = rf"\[{result}\]"
-
-            return result
+def _escape(s: str, color: bool, enclose: bool, zsh: bool) -> str:
+    """Returns a TTY escape sequence for a color"""
+    if color:
+        if zsh:
+            result = rf"\e[0;{s}m"
         else:
-            return ""
+            result = f"\033[{s}m"

-    def __call__(self, match):
-        """Convert a match object generated by ``color_re`` into an ansi
-        color code. This can be used as a handler in ``re.sub``.
-        """
-        style, color, text = match.groups()
-        m = match.group(0)
+        if enclose:
+            result = rf"\[{result}\]"

-        if m == "@@":
-            return "@"
-        elif m == "@.":
-            return self.escape(0)
-        elif m == "@":
-            raise ColorParseError("Incomplete color format: '%s' in %s" % (m, match.string))
-
-        string = styles[style]
-        if color:
-            if color not in colors:
-                raise ColorParseError(
-                    "Invalid color specifier: '%s' in '%s'" % (color, match.string)
-                )
-            string += ";" + str(colors[color])
-
-        colored_text = ""
-        if text:
-            colored_text = text + self.escape(0)
-
-        return self.escape(string) + colored_text
+        return result
+    else:
+        return ""


-def colorize(string, **kwargs):
+def colorize(
+    string: str, color: Optional[bool] = None, enclose: bool = False, zsh: bool = False
+) -> str:
     """Replace all color expressions in a string with ANSI control codes.

     Args:
-        string (str): The string to replace
+        string: The string to replace

     Returns:
-        str: The filtered string
+        The filtered string

     Keyword Arguments:
-        color (bool): If False, output will be plain text without control
-            codes, for output to non-console devices.
-        enclose (bool): If True, enclose ansi color sequences with
+        color: If False, output will be plain text without control codes, for output to
+            non-console devices (default: automatically choose color or not)
+        enclose: If True, enclose ansi color sequences with
             square brackets to prevent misestimation of terminal width.
-        zsh (bool): If True, use zsh ansi codes instead of bash ones (for variables like PS1)
+        zsh: If True, use zsh ansi codes instead of bash ones (for variables like PS1)
     """
-    color = _color_when_value(kwargs.get("color", get_color_when()))
-    zsh = kwargs.get("zsh", False)
-    string = re.sub(color_re, match_to_ansi(color, kwargs.get("enclose")), string, zsh)
-    string = string.replace("}}", "}")
-    return string
+    color = color if color is not None else get_color_when()
+
+    def match_to_ansi(match):
+        """Convert a match object generated by ``COLOR_RE`` into an ansi
+        color code. This can be used as a handler in ``re.sub``.
+        """
+        escaped_at, dot, style, color_code, text = match.groups()
+
+        if escaped_at:
+            return "@"
+        elif dot:
+            return _escape(0, color, enclose, zsh)
+        elif not (style or color_code):
+            raise ColorParseError(
+                f"Incomplete color format: '{match.group(0)}' in '{match.string}'"
+            )
+
+        ansi_code = _escape(f"{styles[style]};{colors.get(color_code, '')}", color, enclose, zsh)
+        if text:
+            return f"{ansi_code}{text}{_escape(0, color, enclose, zsh)}"
+        else:
+            return ansi_code
+
+    return COLOR_RE.sub(match_to_ansi, string).replace("}}", "}")


 def clen(string):
@@ -305,7 +293,7 @@ def cprint(string, stream=None, color=None):
     cwrite(string + "\n", stream, color)


-def cescape(string):
+def cescape(string: str) -> str:
     """Escapes special characters needed for color codes.

     Replaces the following symbols with their equivalent literal forms:
@@ -321,10 +309,7 @@ def cescape(string):
     Returns:
         (str): the string with color codes escaped
     """
-    string = str(string)
-    string = string.replace("@", "@@")
-    string = string.replace("}", "}}")
-    return string
+    return string.replace("@", "@@").replace("}", "}}")


 class ColorStream:
```
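To see what the rewritten `colorize` consumes, here is a compressed sketch of the `@`-markup engine using the same `COLOR_RE` pattern as the diff above; the style and color tables are trimmed to a few entries and the error path is omitted, so this is illustrative rather than the full Spack module:

```python
import re

# Same pattern as the new COLOR_RE above.
COLOR_RE = re.compile(r"@(?:(@)|(\.)|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)")

STYLES = {None: "0", "*": "1", "_": "4"}  # plain, bold, underline
COLORS = {"r": 31, "g": 32, "b": 34}      # trimmed color table

def colorize(string: str) -> str:
    """Translate '@*b{text}'-style markup into ANSI escape codes."""
    def to_ansi(match: re.Match) -> str:
        escaped_at, dot, style, color, text = match.groups()
        if escaped_at:
            return "@"          # '@@' escapes a literal '@'
        if dot:
            return "\033[0m"    # '@.' resets all attributes
        code = f"{STYLES[style]};{COLORS.get(color, '')}"
        body = f"{text}\033[0m" if text else ""
        return f"\033[{code}m{body}"
    return COLOR_RE.sub(to_ansi, string).replace("}}", "}")

print(colorize("@*b{==>} building @g{zlib}"))  # bold blue arrow, green name
```

The named groups make the dispatch explicit: the old implementation re-inspected `match.group(0)` to distinguish `@@`, `@.`, and incomplete formats, while the new regex captures each case directly.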
```diff
@@ -1046,7 +1046,7 @@ def _extracts_errors(triggers, summary):
     group="externals",
     tag="PKG-EXTERNALS",
     description="Sanity checks for external software detection",
-    kwargs=("pkgs",),
+    kwargs=("pkgs", "debug_log"),
 )


@@ -1069,7 +1069,7 @@ def packages_with_detection_tests():


 @external_detection
-def _test_detection_by_executable(pkgs, error_cls):
+def _test_detection_by_executable(pkgs, debug_log, error_cls):
     """Test drive external detection for packages"""
     import spack.detection

@@ -1095,6 +1095,7 @@ def _test_detection_by_executable(pkgs, error_cls):
     for idx, test_runner in enumerate(
         spack.detection.detection_tests(pkg_name, spack.repo.PATH)
     ):
+        debug_log(f"[{__file__}]: running test {idx} for package {pkg_name}")
         specs = test_runner.execute()
         expected_specs = test_runner.expected_specs

@@ -1111,4 +1112,75 @@ def _test_detection_by_executable(pkgs, error_cls):
             details = [msg.format(s, idx) for s in sorted(not_expected)]
             errors.append(error_cls(summary=summary, details=details))

+        matched_detection = []
+        for candidate in expected_specs:
+            try:
+                idx = specs.index(candidate)
+                matched_detection.append((candidate, specs[idx]))
+            except (AttributeError, ValueError):
+                pass
+
+        def _compare_extra_attribute(_expected, _detected, *, _spec):
+            result = []
+            # Check items are of the same type
+            if not isinstance(_detected, type(_expected)):
+                _summary = f'{pkg_name}: error when trying to detect "{_expected}"'
+                _details = [f"{_detected} was detected instead"]
+                return [error_cls(summary=_summary, details=_details)]
+
+            # If they are string expected is a regex
+            if isinstance(_expected, str):
+                try:
+                    _regex = re.compile(_expected)
+                except re.error:
+                    _summary = f'{pkg_name}: illegal regex in "{_spec}" extra attributes'
+                    _details = [f"{_expected} is not a valid regex"]
+                    return [error_cls(summary=_summary, details=_details)]
+
+                if not _regex.match(_detected):
+                    _summary = (
+                        f'{pkg_name}: error when trying to match "{_expected}" '
+                        f"in extra attributes"
+                    )
+                    _details = [f"{_detected} does not match the regex"]
+                    return [error_cls(summary=_summary, details=_details)]
+
+            if isinstance(_expected, dict):
+                _not_detected = set(_expected.keys()) - set(_detected.keys())
+                if _not_detected:
+                    _summary = f"{pkg_name}: cannot detect some attributes for spec {_spec}"
+                    _details = [
+                        f'"{_expected}" was expected',
+                        f'"{_detected}" was detected',
+                    ] + [f'attribute "{s}" was not detected' for s in sorted(_not_detected)]
+                    result.append(error_cls(summary=_summary, details=_details))
+
+                _common = set(_expected.keys()) & set(_detected.keys())
+                for _key in _common:
+                    result.extend(
+                        _compare_extra_attribute(_expected[_key], _detected[_key], _spec=_spec)
+                    )
+
+            return result
+
+        for expected, detected in matched_detection:
+            # We might not want to test all attributes, so avoid not_expected
+            not_detected = set(expected.extra_attributes) - set(detected.extra_attributes)
+            if not_detected:
+                summary = f"{pkg_name}: cannot detect some attributes for spec {expected}"
+                details = [
+                    f'"{s}" was not detected [test_id={idx}]' for s in sorted(not_detected)
+                ]
+                errors.append(error_cls(summary=summary, details=details))
+
+            common = set(expected.extra_attributes) & set(detected.extra_attributes)
+            for key in common:
+                errors.extend(
+                    _compare_extra_attribute(
+                        expected.extra_attributes[key],
+                        detected.extra_attributes[key],
+                        _spec=expected,
+                    )
+                )
+
     return errors
```
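The nested `_compare_extra_attribute` walk reduces to two rules: expected strings are regular expressions matched against the detected value, and expected dictionaries recurse key by key. A stripped-down sketch of that rule with made-up attribute data (plain Python, not Spack's error classes):

```python
import re
from typing import List, Union

Attrs = Union[str, dict]

def compare_extra_attribute(expected: Attrs, detected: Attrs) -> List[str]:
    """Return error messages; expected strings are treated as regexes."""
    errors: List[str] = []
    if not isinstance(detected, type(expected)):
        return [f"{detected!r} was detected instead of {expected!r}"]
    if isinstance(expected, str):
        if not re.match(expected, detected):
            errors.append(f"{detected!r} does not match regex {expected!r}")
    elif isinstance(expected, dict):
        for key in set(expected) - set(detected):
            errors.append(f"attribute {key!r} was not detected")
        for key in set(expected) & set(detected):
            errors.extend(compare_extra_attribute(expected[key], detected[key]))
    return errors

# Hypothetical detection-test data, in the shape the docs table describes.
expected = {"compilers": {"c": r".*/clang-\d+$"}}
detected = {"compilers": {"c": "/usr/bin/clang-15"}}
assert compare_extra_attribute(expected, detected) == []
```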
```diff
@@ -173,35 +173,14 @@ def _read_metadata(self, package_name: str) -> Any:
         return data

     def _install_by_hash(
-        self,
-        pkg_hash: str,
-        pkg_sha256: str,
-        index: List[spack.spec.Spec],
-        bincache_platform: spack.platforms.Platform,
+        self, pkg_hash: str, pkg_sha256: str, bincache_platform: spack.platforms.Platform
     ) -> None:
-        index_spec = next(x for x in index if x.dag_hash() == pkg_hash)
-        # Reconstruct the compiler that we need to use for bootstrapping
-        compiler_entry = {
-            "modules": [],
-            "operating_system": str(index_spec.os),
-            "paths": {
-                "cc": "/dev/null",
-                "cxx": "/dev/null",
-                "f77": "/dev/null",
-                "fc": "/dev/null",
-            },
-            "spec": str(index_spec.compiler),
-            "target": str(index_spec.target.family),
-        }
         with spack.platforms.use_platform(bincache_platform):
-            with spack.config.override("compilers", [{"compiler": compiler_entry}]):
-                spec_str = "/" + pkg_hash
-                query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
-                matches = spack.store.find([spec_str], multiple=False, query_fn=query)
-                for match in matches:
-                    spack.binary_distribution.install_root_node(
-                        match, unsigned=True, force=True, sha256=pkg_sha256
-                    )
+            query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
+            for match in spack.store.find([f"/{pkg_hash}"], multiple=False, query_fn=query):
+                spack.binary_distribution.install_root_node(
+                    match, unsigned=True, force=True, sha256=pkg_sha256
+                )

     def _install_and_test(
         self,
@@ -232,7 +211,7 @@ def _install_and_test(
                 continue

             for _, pkg_hash, pkg_sha256 in item["binaries"]:
-                self._install_by_hash(pkg_hash, pkg_sha256, index, bincache_platform)
+                self._install_by_hash(pkg_hash, pkg_sha256, bincache_platform)

             info: ConfigDictionary = {}
             if test_fn(query_spec=abstract_spec, query_info=info):
```
```diff
@@ -43,7 +43,7 @@
 from collections import defaultdict
 from enum import Flag, auto
 from itertools import chain
-from typing import List, Tuple
+from typing import List, Set, Tuple

 import llnl.util.tty as tty
 from llnl.string import plural
@@ -57,8 +57,10 @@
 import spack.build_systems.meson
 import spack.build_systems.python
 import spack.builder
+import spack.compilers
 import spack.config
 import spack.deptypes as dt
+import spack.error
 import spack.main
 import spack.package_base
 import spack.paths
@@ -66,6 +68,7 @@
 import spack.repo
 import spack.schema.environment
 import spack.spec
+import spack.stage
 import spack.store
 import spack.subprocess_context
 import spack.user_environment
@@ -78,7 +81,7 @@
 from spack.installer import InstallError
 from spack.util.cpus import determine_number_of_jobs
 from spack.util.environment import (
-    SYSTEM_DIRS,
+    SYSTEM_DIR_CASE_ENTRY,
     EnvironmentModifications,
     env_flag,
     filter_system_paths,
@@ -101,9 +104,13 @@
 # Spack's compiler wrappers.
 #
 SPACK_ENV_PATH = "SPACK_ENV_PATH"
+SPACK_MANAGED_DIRS = "SPACK_MANAGED_DIRS"
 SPACK_INCLUDE_DIRS = "SPACK_INCLUDE_DIRS"
 SPACK_LINK_DIRS = "SPACK_LINK_DIRS"
 SPACK_RPATH_DIRS = "SPACK_RPATH_DIRS"
+SPACK_STORE_INCLUDE_DIRS = "SPACK_STORE_INCLUDE_DIRS"
+SPACK_STORE_LINK_DIRS = "SPACK_STORE_LINK_DIRS"
+SPACK_STORE_RPATH_DIRS = "SPACK_STORE_RPATH_DIRS"
 SPACK_RPATH_DEPS = "SPACK_RPATH_DEPS"
 SPACK_LINK_DEPS = "SPACK_LINK_DEPS"
 SPACK_PREFIX = "SPACK_PREFIX"
@@ -416,7 +423,7 @@ def set_compiler_environment_variables(pkg, env):

     env.set("SPACK_COMPILER_SPEC", str(spec.compiler))

-    env.set("SPACK_SYSTEM_DIRS", ":".join(SYSTEM_DIRS))
+    env.set("SPACK_SYSTEM_DIRS", SYSTEM_DIR_CASE_ENTRY)

     compiler.setup_custom_environment(pkg, env)

@@ -544,9 +551,26 @@ def update_compiler_args_for_dep(dep):
     include_dirs = list(dedupe(filter_system_paths(include_dirs)))
     rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))

-    env.set(SPACK_LINK_DIRS, ":".join(link_dirs))
-    env.set(SPACK_INCLUDE_DIRS, ":".join(include_dirs))
-    env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))
+    # Spack managed directories include the stage, store and upstream stores. We extend this with
+    # their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).
+    spack_managed_dirs: Set[str] = {
+        spack.stage.get_stage_root(),
+        spack.store.STORE.db.root,
+        *(db.root for db in spack.store.STORE.db.upstream_dbs),
+    }
+    spack_managed_dirs.update([os.path.realpath(p) for p in spack_managed_dirs])
+
+    env.set(SPACK_MANAGED_DIRS, "|".join(f'"{p}/"*' for p in sorted(spack_managed_dirs)))
+    is_spack_managed = lambda p: any(p.startswith(store) for store in spack_managed_dirs)
+    link_dirs_spack, link_dirs_system = stable_partition(link_dirs, is_spack_managed)
+    include_dirs_spack, include_dirs_system = stable_partition(include_dirs, is_spack_managed)
+    rpath_dirs_spack, rpath_dirs_system = stable_partition(rpath_dirs, is_spack_managed)
+    env.set(SPACK_LINK_DIRS, ":".join(link_dirs_system))
+    env.set(SPACK_INCLUDE_DIRS, ":".join(include_dirs_system))
+    env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs_system))
+    env.set(SPACK_STORE_LINK_DIRS, ":".join(link_dirs_spack))
+    env.set(SPACK_STORE_INCLUDE_DIRS, ":".join(include_dirs_spack))
+    env.set(SPACK_STORE_RPATH_DIRS, ":".join(rpath_dirs_spack))


 def set_package_py_globals(pkg, context: Context = Context.BUILD):
```
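The partition step above is what feeds the compiler wrapper's three-way ordering: every search path is classified as Spack-managed or not while preserving order. A self-contained sketch under the assumption that `stable_partition` splits a sequence by a predicate without reordering (the helper below is a local stand-in for illustration, not the `llnl.util.lang` import Spack uses):

```python
from typing import Callable, Iterable, List, Tuple

def stable_partition(
    items: Iterable[str], predicate: Callable[[str], bool]
) -> Tuple[List[str], List[str]]:
    """Split items into (matching, non-matching) without reordering."""
    true_items: List[str] = []
    false_items: List[str] = []
    for item in items:
        (true_items if predicate(item) else false_items).append(item)
    return true_items, false_items

# Hypothetical store/stage roots; real values come from spack.store/spack.stage.
managed = {"/opt/spack/store", "/tmp/spack-stage"}
is_managed = lambda p: any(p.startswith(root) for root in managed)

link_dirs = ["/opt/spack/store/gcc/lib", "/usr/lib", "/opt/spack/store/zlib/lib"]
spack_dirs, system_dirs = stable_partition(link_dirs, is_managed)
assert spack_dirs == ["/opt/spack/store/gcc/lib", "/opt/spack/store/zlib/lib"]
assert system_dirs == ["/usr/lib"]
```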
```diff
@@ -583,10 +607,22 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
     # Put spack compiler paths in module scope. (Some packages use it
     # in setup_run_environment etc, so don't put it context == build)
     link_dir = spack.paths.build_env_path
-    module.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"])
-    module.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"])
-    module.spack_f77 = os.path.join(link_dir, pkg.compiler.link_paths["f77"])
-    module.spack_fc = os.path.join(link_dir, pkg.compiler.link_paths["fc"])
+    pkg_compiler = None
+    try:
+        pkg_compiler = pkg.compiler
+    except spack.compilers.NoCompilerForSpecError as e:
+        tty.debug(f"cannot set 'spack_cc': {str(e)}")
+
+    if pkg_compiler is not None:
+        module.spack_cc = os.path.join(link_dir, pkg_compiler.link_paths["cc"])
+        module.spack_cxx = os.path.join(link_dir, pkg_compiler.link_paths["cxx"])
+        module.spack_f77 = os.path.join(link_dir, pkg_compiler.link_paths["f77"])
+        module.spack_fc = os.path.join(link_dir, pkg_compiler.link_paths["fc"])
+    else:
+        module.spack_cc = None
+        module.spack_cxx = None
+        module.spack_f77 = None
+        module.spack_fc = None

     # Useful directories within the prefix are encapsulated in
     # a Prefix object.
```
```diff
@@ -139,11 +139,6 @@ def initconfig_compiler_entries(self):
             "endif()\n",
         ]

-        # We defined hipcc as top-level compiler for packages when +rocm.
-        # This avoid problems coming from rocm flags being applied to another compiler.
-        if "+rocm" in spec:
-            entries.insert(0, cmake_cache_path("CMAKE_CXX_COMPILER", self.spec["hip"].hipcc))
-
         flags = spec.compiler_flags

         # use global spack compiler flags
```
```diff
@@ -16,7 +16,7 @@


 class CargoPackage(spack.package_base.PackageBase):
-    """Specialized class for packages built using a Makefiles."""
+    """Specialized class for packages built using cargo."""

     #: This attribute is used in UI queries that need to know the build
     #: system base class
```
```diff
@@ -21,7 +21,7 @@


 class MakefilePackage(spack.package_base.PackageBase):
-    """Specialized class for packages built using a Makefiles."""
+    """Specialized class for packages built using Makefiles."""

     #: This attribute is used in UI queries that need to know the build
     #: system base class
```
@@ -14,7 +14,7 @@
 from llnl.util.link_tree import LinkTree

 from spack.build_environment import dso_suffix
-from spack.directives import conflicts, license, variant
+from spack.directives import conflicts, license, redistribute, variant
 from spack.package_base import InstallError
 from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable
@@ -30,7 +30,7 @@ class IntelOneApiPackage(Package):

     # oneAPI license does not allow mirroring outside of the
     # organization (e.g. University/Company).
-    redistribute_source = False
+    redistribute(source=False, binary=False)

     for c in [
         "target=ppc64:",
@@ -80,6 +80,7 @@
 import spack.variant
 from spack.directives import conflicts, depends_on, variant
 from spack.package_base import PackageBase
+from spack.util.environment import EnvironmentModifications


 class ROCmPackage(PackageBase):
@@ -156,30 +157,23 @@ def hip_flags(amdgpu_target):
         archs = ",".join(amdgpu_target)
         return "--amdgpu-target={0}".format(archs)

     # ASAN
-    @staticmethod
-    def asan_on(env, llvm_path):
+    def asan_on(self, env: EnvironmentModifications):
+        llvm_path = self.spec["llvm-amdgpu"].prefix
         env.set("CC", llvm_path + "/bin/clang")
         env.set("CXX", llvm_path + "/bin/clang++")
         env.set("ASAN_OPTIONS", "detect_leaks=0")

-        for root, dirs, files in os.walk(llvm_path):
+        for root, _, files in os.walk(llvm_path):
             if "libclang_rt.asan-x86_64.so" in files:
                 asan_lib_path = root
         env.prepend_path("LD_LIBRARY_PATH", asan_lib_path)
-        SET_DWARF_VERSION_4 = ""
-        try:
-            # This will throw an error if imported on a non-Linux platform.
-            import distro
-
-            distname = distro.id()
-        except ImportError:
-            distname = "unknown"
-        if "rhel" in distname or "sles" in distname:
+        if "rhel" in self.spec.os or "sles" in self.spec.os:
             SET_DWARF_VERSION_4 = "-gdwarf-5"
+        else:
+            SET_DWARF_VERSION_4 = ""

-        env.set("CFLAGS", "-fsanitize=address -shared-libasan -g " + SET_DWARF_VERSION_4)
-        env.set("CXXFLAGS", "-fsanitize=address -shared-libasan -g " + SET_DWARF_VERSION_4)
+        env.set("CFLAGS", f"-fsanitize=address -shared-libasan -g {SET_DWARF_VERSION_4}")
+        env.set("CXXFLAGS", f"-fsanitize=address -shared-libasan -g {SET_DWARF_VERSION_4}")
         env.set("LDFLAGS", "-Wl,--enable-new-dtags -fuse-ld=lld -fsanitize=address -g -Wl,")

     # HIP version vs Architecture
@@ -16,8 +16,8 @@
 import tempfile
 import time
 import zipfile
-from collections import namedtuple
-from typing import List, Optional
+from collections import defaultdict, namedtuple
+from typing import Dict, List, Optional, Set, Tuple
 from urllib.error import HTTPError, URLError
 from urllib.parse import urlencode
 from urllib.request import HTTPHandler, Request, build_opener
@@ -113,54 +113,24 @@ def _remove_reserved_tags(tags):
     return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]


-def _spec_deps_key(s):
+def _spec_ci_label(s):
     return f"{s.name}/{s.dag_hash(7)}"


-def _add_dependency(spec_label, dep_label, deps):
-    if spec_label == dep_label:
-        return
-    if spec_label not in deps:
-        deps[spec_label] = set()
-    deps[spec_label].add(dep_label)
+PlainNodes = Dict[str, spack.spec.Spec]
+PlainEdges = Dict[str, Set[str]]


-def _get_spec_dependencies(specs, deps, spec_labels):
-    spec_deps_obj = _compute_spec_deps(specs)
-
-    if spec_deps_obj:
-        dependencies = spec_deps_obj["dependencies"]
-        specs = spec_deps_obj["specs"]
-
-        for entry in specs:
-            spec_labels[entry["label"]] = entry["spec"]
-
-        for entry in dependencies:
-            _add_dependency(entry["spec"], entry["depends"], deps)
-
-
-def stage_spec_jobs(specs):
-    """Take a set of release specs and generate a list of "stages", where the
-    jobs in any stage are dependent only on jobs in previous stages. This
-    allows us to maximize build parallelism within the gitlab-ci framework.
+def stage_spec_jobs(specs: List[spack.spec.Spec]) -> Tuple[PlainNodes, PlainEdges, List[Set[str]]]:
+    """Turn a DAG into a list of stages (set of nodes), the list is ordered topologically, so that
+    each node in a stage has dependencies only in previous stages.

     Arguments:
-        specs (Iterable): Specs to build
-
-    Returns: A tuple of information objects describing the specs, dependencies
-        and stages:
-
-        spec_labels: A dictionary mapping the spec labels (which are formatted
-            as pkg-name/hash-prefix) to concrete specs.
-
-        deps: A dictionary where the keys should also have appeared as keys in
-            the spec_labels dictionary, and the values are the set of
-            dependencies for that spec.
-
-        stages: An ordered list of sets, each of which contains all the jobs to
-            built in that stage. The jobs are expressed in the same format as
-            the keys in the spec_labels and deps objects.
+        specs: Specs to build

+    Returns: A tuple (nodes, edges, stages) where ``nodes`` maps labels to specs, ``edges`` maps
+        labels to a set of labels of dependencies, and ``stages`` is a topologically ordered list
+        of sets of labels.
     """

     # The convenience method below, "_remove_satisfied_deps()", does not modify
@@ -177,17 +147,12 @@ def _remove_satisfied_deps(deps, satisfied_list):

         return new_deps

-    deps = {}
-    spec_labels = {}
-
-    _get_spec_dependencies(specs, deps, spec_labels)
+    nodes, edges = _extract_dag(specs)

-    # Save the original deps, as we need to return them at the end of the
-    # function. In the while loop below, the "dependencies" variable is
-    # overwritten rather than being modified each time through the loop,
-    # thus preserving the original value of "deps" saved here.
-    dependencies = deps
-    unstaged = set(spec_labels.keys())
+    # Save the original edges, as we need to return them at the end of the function. In the loop
+    # below, the "dependencies" variable is rebound rather than mutated, so "edges" is not mutated.
+    dependencies = edges
+    unstaged = set(nodes.keys())
     stages = []

     while dependencies:
@@ -203,7 +168,7 @@ def _remove_satisfied_deps(deps, satisfied_list):
     if unstaged:
         stages.append(unstaged.copy())

-    return spec_labels, deps, stages
+    return nodes, edges, stages


 def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions):
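The staging loop this hunk feeds is a Kahn-style batched topological sort: repeatedly collect every node whose dependencies are already staged, emit them as one stage, and remove them from the working set. A minimal standalone version of the idea (helper name and hash prefixes are illustrative, not Spack's API):

from typing import Dict, List, Set

def stage_dag(edges: Dict[str, Set[str]], all_nodes: Set[str]) -> List[Set[str]]:
    """Group nodes into stages; every node's deps land in an earlier stage."""
    unstaged = set(all_nodes)
    remaining = {n: set(deps) for n, deps in edges.items()}
    stages: List[Set[str]] = []
    while unstaged:
        # Nodes whose remaining dependencies have all been staged already.
        ready = {n for n in unstaged if not remaining.get(n)}
        if not ready:
            raise ValueError("cycle detected in DAG")
        stages.append(ready)
        unstaged -= ready
        remaining = {n: deps - ready for n, deps in remaining.items() if n in unstaged}
    return stages

# Example: zlib has no deps; ncurses needs zlib; readline needs ncurses.
edges = {"readline/abc1234": {"ncurses/def5678"}, "ncurses/def5678": {"zlib/0123abc"}}
nodes = {"readline/abc1234", "ncurses/def5678", "zlib/0123abc"}
print(stage_dag(edges, nodes))  # [{'zlib/0123abc'}, {'ncurses/def5678'}, {'readline/abc1234'}]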
@@ -235,87 +200,22 @@ def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions):
         tty.msg(msg)


-def _compute_spec_deps(spec_list):
-    """
-    Computes all the dependencies for the spec(s) and generates a JSON
-    object which provides both a list of unique spec names as well as a
-    comprehensive list of all the edges in the dependency graph. For
-    example, given a single spec like 'readline@7.0', this function
-    generates the following JSON object:
-
-    .. code-block:: JSON
-
-       {
-           "dependencies": [
-               {
-                   "depends": "readline/ip6aiun",
-                   "spec": "readline/ip6aiun"
-               },
-               {
-                   "depends": "ncurses/y43rifz",
-                   "spec": "readline/ip6aiun"
-               },
-               {
-                   "depends": "ncurses/y43rifz",
-                   "spec": "readline/ip6aiun"
-               },
-               {
-                   "depends": "pkgconf/eg355zb",
-                   "spec": "ncurses/y43rifz"
-               },
-               {
-                   "depends": "pkgconf/eg355zb",
-                   "spec": "readline/ip6aiun"
-               }
-           ],
-           "specs": [
-               {
-                   "spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-highs...",
-                   "label": "readline/ip6aiun"
-               },
-               {
-                   "spec": "ncurses@6.1%apple-clang@9.1.0 arch=darwin-highsi...",
-                   "label": "ncurses/y43rifz"
-               },
-               {
-                   "spec": "pkgconf@1.5.4%apple-clang@9.1.0 arch=darwin-high...",
-                   "label": "pkgconf/eg355zb"
-               }
-           ]
-       }
-
-    """
-    spec_labels = {}
-
-    specs = []
-    dependencies = []
-
-    def append_dep(s, d):
-        dependencies.append({"spec": s, "depends": d})
-
-    for spec in spec_list:
-        for s in spec.traverse(deptype="all"):
-            if s.external:
-                tty.msg(f"Will not stage external pkg: {s}")
-                continue
-
-            skey = _spec_deps_key(s)
-            spec_labels[skey] = s
-
-            for d in s.dependencies(deptype="all"):
-                dkey = _spec_deps_key(d)
-                if d.external:
-                    tty.msg(f"Will not stage external dep: {d}")
-                    continue
-
-                append_dep(skey, dkey)
-
-    for spec_label, concrete_spec in spec_labels.items():
-        specs.append({"label": spec_label, "spec": concrete_spec})
-
-    deps_json_obj = {"specs": specs, "dependencies": dependencies}
-
-    return deps_json_obj
+def _extract_dag(specs: List[spack.spec.Spec]) -> Tuple[PlainNodes, PlainEdges]:
+    """Extract a sub-DAG as plain old Python objects with external nodes removed."""
+    nodes: PlainNodes = {}
+    edges: PlainEdges = defaultdict(set)
+
+    for edge in traverse.traverse_edges(specs, cover="edges"):
+        if (edge.parent and edge.parent.external) or edge.spec.external:
+            continue
+        child_id = _spec_ci_label(edge.spec)
+        nodes[child_id] = edge.spec
+        if edge.parent:
+            parent_id = _spec_ci_label(edge.parent)
+            nodes[parent_id] = edge.parent
+            edges[parent_id].add(child_id)
+
+    return nodes, edges


 def _spec_matches(spec, match_string):
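For the same readline example the removed JSON docstring walked through, the new plain objects are just two dicts, which is what lets the staging code above work with ordinary set operations. Roughly (hash prefixes below are illustrative):

# nodes: label -> concrete Spec
nodes = {
    "readline/ip6aiun": "<Spec readline@7.0 ...>",
    "ncurses/y43rifz": "<Spec ncurses@6.1 ...>",
    "pkgconf/eg355zb": "<Spec pkgconf@1.5.4 ...>",
}
# edges: label -> set of labels it depends on
edges = {
    "readline/ip6aiun": {"ncurses/y43rifz", "pkgconf/eg355zb"},
    "ncurses/y43rifz": {"pkgconf/eg355zb"},
}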
@@ -327,7 +227,7 @@ def _format_job_needs(
 ):
     needs_list = []
     for dep_job in dep_jobs:
-        dep_spec_key = _spec_deps_key(dep_job)
+        dep_spec_key = _spec_ci_label(dep_job)
         rebuild = rebuild_decisions[dep_spec_key].rebuild

         if not prune_dag or rebuild:
@@ -334,8 +334,7 @@ def display_specs(specs, args=None, **kwargs):
         variants (bool): Show variants with specs
         indent (int): indent each line this much
         groups (bool): display specs grouped by arch/compiler (default True)
-        decorators (dict): dictionary mappng specs to decorators
-        header_callback (typing.Callable): called at start of arch/compiler groups
+        decorator (typing.Callable): function to call to decorate specs
         all_headers (bool): show headers even when arch/compiler aren't defined
         output (typing.IO): A file object to write to. Default is ``sys.stdout``

@@ -384,15 +383,13 @@ def get_arg(name, default=None):
     vfmt = "{variants}" if variants else ""
     format_string = nfmt + "{@version}" + ffmt + vfmt

-    transform = {"package": decorator, "fullpackage": decorator}
-
     def fmt(s, depth=0):
         """Formatter function for all output specs"""
         string = ""
         if hashes:
             string += gray_hash(s, hlen) + " "
         string += depth * " "
-        string += s.cformat(format_string, transform=transform)
+        string += decorator(s, s.cformat(format_string))
         return string

     def format_list(specs):
@@ -451,7 +448,7 @@ def filter_loaded_specs(specs):
     return [x for x in specs if x.dag_hash() in hashes]


-def print_how_many_pkgs(specs, pkg_type=""):
+def print_how_many_pkgs(specs, pkg_type="", suffix=""):
     """Given a list of specs, this will print a message about how many
     specs are in that list.

@@ -462,7 +459,7 @@ def print_how_many_pkgs(specs, pkg_type=""):
         category, e.g. if pkg_type is "installed" then the message
         would be "3 installed packages"
     """
-    tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package"))
+    tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package") + suffix)


 def spack_is_git_repo():
@@ -84,7 +84,7 @@ def externals(parser, args):
         return

     pkgs = args.name or spack.repo.PATH.all_package_names()
-    reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
+    reports = spack.audit.run_group(args.subcommand, pkgs=pkgs, debug_log=tty.debug)
     _process_reports(reports)
@@ -133,6 +133,11 @@ def setup_parser(subparser: argparse.ArgumentParser):
         help="when pushing to an OCI registry, tag an image containing all root specs and their "
         "runtime dependencies",
     )
+    push.add_argument(
+        "--private",
+        action="store_true",
+        help="for a private mirror, include non-redistributable packages",
+    )
     arguments.add_common_arguments(push, ["specs", "jobs"])
     push.set_defaults(func=push_fn)

@@ -367,6 +372,25 @@ def _make_pool() -> MaybePool:
     return NoPool()


+def _skip_no_redistribute_for_public(specs):
+    remaining_specs = list()
+    removed_specs = list()
+    for spec in specs:
+        if spec.package.redistribute_binary:
+            remaining_specs.append(spec)
+        else:
+            removed_specs.append(spec)
+    if removed_specs:
+        colified_output = tty.colify.colified(list(s.name for s in removed_specs), indent=4)
+        tty.debug(
+            "The following specs will not be added to the binary cache"
+            " because they cannot be redistributed:\n"
+            f"{colified_output}\n"
+            "You can use `--private` to include them."
+        )
+    return remaining_specs
+
+
 def push_fn(args):
     """create a binary package and push it to a mirror"""
     if args.spec_file:
@@ -417,6 +441,8 @@ def push_fn(args):
         root="package" in args.things_to_install,
         dependencies="dependencies" in args.things_to_install,
     )
+    if not args.private:
+        specs = _skip_no_redistribute_for_public(specs)

     # When pushing multiple specs, print the url once ahead of time, as well as how
     # many specs are being pushed.
@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import copy
 import sys

 import llnl.util.lang
@@ -14,6 +13,7 @@
 import spack.cmd as cmd
 import spack.environment as ev
 import spack.repo
+import spack.store
 from spack.cmd.common import arguments
 from spack.database import InstallStatuses
@@ -69,6 +69,12 @@ def setup_parser(subparser):

     arguments.add_common_arguments(subparser, ["long", "very_long", "tags", "namespaces"])

+    subparser.add_argument(
+        "-r",
+        "--only-roots",
+        action="store_true",
+        help="don't show full list of installed specs in an environment",
+    )
     subparser.add_argument(
         "-c",
         "--show-concretized",
@@ -189,26 +195,22 @@ def query_arguments(args):
     return q_args


-def setup_env(env):
+def make_env_decorator(env):
     """Create a function for decorating specs when in an environment."""

-    def strip_build(seq):
-        return set(s.copy(deps=("link", "run")) for s in seq)
-
-    added = set(strip_build(env.added_specs()))
-    roots = set(strip_build(env.roots()))
-    removed = set(strip_build(env.removed_specs()))
+    roots = set(env.roots())
+    removed = set(env.removed_specs())

     def decorator(spec, fmt):
         # add +/-/* to show added/removed/root specs
         if any(spec.dag_hash() == r.dag_hash() for r in roots):
-            return color.colorize("@*{%s}" % fmt)
+            return color.colorize(f"@*{{{fmt}}}")
         elif spec in removed:
-            return color.colorize("@K{%s}" % fmt)
+            return color.colorize(f"@K{{{fmt}}}")
         else:
-            return "%s" % fmt
+            return fmt

-    return decorator, added, roots, removed
+    return decorator
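make_env_decorator returns a closure that captures the environment's root and removed sets once, so decorating each displayed spec is a cheap membership test rather than a fresh query. A toy version of the same shape (names and markers here are illustrative):

def make_tag_decorator(roots, removed):
    """Build a decorator that closes over precomputed sets."""
    def decorator(name: str, formatted: str) -> str:
        if name in roots:
            return f"*{formatted}"  # mark roots
        if name in removed:
            return f"-{formatted}"  # mark removed specs
        return formatted
    return decorator

decorate = make_tag_decorator(roots={"hdf5"}, removed={"zlib"})
print(decorate("hdf5", "hdf5@1.14"))    # *hdf5@1.14
print(decorate("zlib", "zlib@1.3"))     # -zlib@1.3
print(decorate("cmake", "cmake@3.27"))  # cmake@3.27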

 def display_env(env, args, decorator, results):
@@ -223,28 +225,51 @@ def display_env(env, args, decorator, results):
     """
     tty.msg("In environment %s" % env.name)

-    if not env.user_specs:
-        tty.msg("No root specs")
-    else:
-        tty.msg("Root specs")
-
-        # Root specs cannot be displayed with prefixes, since those are not
-        # set for abstract specs. Same for hashes
-        root_args = copy.copy(args)
-        root_args.paths = False
+    num_roots = len(env.user_specs) or "No"
+    tty.msg(f"{num_roots} root specs")
+
+    concrete_specs = {
+        root: concrete_root
+        for root, concrete_root in zip(env.concretized_user_specs, env.concrete_roots())
+    }

-        # Roots are displayed with variants, etc. so that we can see
-        # specifically what the user asked for.
+    def root_decorator(spec, string):
+        """Decorate root specs with their install status if needed"""
+        concrete = concrete_specs.get(spec)
+        if concrete:
+            status = color.colorize(concrete.install_status().value)
+            hash = concrete.dag_hash()
+        else:
+            status = color.colorize(spack.spec.InstallStatus.absent.value)
+            hash = "-" * 32
+
+        # TODO: status has two extra spaces on the end of it, but fixing this and other spec
+        # TODO: space format idiosyncrasies is complicated. Fix this eventually
+        status = status[:-2]
+
+        if args.long or args.very_long:
+            hash = color.colorize(f"@K{{{hash[: 7 if args.long else None]}}}")
+            return f"{status} {hash} {string}"
+        else:
+            return f"{status} {string}"
+
+    with spack.store.STORE.db.read_transaction():
         cmd.display_specs(
             env.user_specs,
-            root_args,
-            decorator=lambda s, f: color.colorize("@*{%s}" % f),
+            args,
+            # these are overrides of CLI args
+            paths=False,
+            long=False,
+            very_long=False,
+            # these enforce details in the root specs to show what the user asked for
             namespaces=True,
             show_flags=True,
             show_full_compiler=True,
+            decorator=root_decorator,
             variants=True,
         )
-        print()
+    print()

     if args.show_concretized:
         tty.msg("Concretized roots")
@@ -254,7 +279,7 @@ def display_env(env, args, decorator, results):
     # Display a header for the installed packages section IF there are installed
     # packages. If there aren't any, we'll just end up printing "0 installed packages"
     # later.
-    if results:
+    if results and not args.only_roots:
         tty.msg("Installed packages")


@@ -263,9 +288,10 @@ def find(parser, args):
     results = args.specs(**q_args)

     env = ev.active_environment()
-    decorator = lambda s, f: f
-    if env:
-        decorator, _, roots, _ = setup_env(env)
+    if not env and args.only_roots:
+        tty.die("-r / --only-roots requires an active environment")
+
+    decorator = make_env_decorator(env) if env else lambda s, f: f

     # use groups by default except with format.
     if args.groups is None:
@@ -292,9 +318,12 @@ def find(parser, args):
     if env:
         display_env(env, args, decorator, results)

-    cmd.display_specs(results, args, decorator=decorator, all_headers=True)
+    count_suffix = " (not shown)"
+    if not args.only_roots:
+        cmd.display_specs(results, args, decorator=decorator, all_headers=True)
+        count_suffix = ""

     # print number of installed packages last (as the list may be long)
     if sys.stdout.isatty() and args.groups:
         pkg_type = "loaded" if args.loaded else "installed"
-        spack.cmd.print_how_many_pkgs(results, pkg_type)
+        spack.cmd.print_how_many_pkgs(results, pkg_type, suffix=count_suffix)
@@ -263,8 +263,8 @@ def _fmt_name_and_default(variant):
     return color.colorize(f"@c{{{variant.name}}} @C{{[{_fmt_value(variant.default)}]}}")


-def _fmt_when(when, indent):
-    return color.colorize(f"{indent * ' '}@B{{when}} {color.cescape(when)}")
+def _fmt_when(when: "spack.spec.Spec", indent: int):
+    return color.colorize(f"{indent * ' '}@B{{when}} {color.cescape(str(when))}")


 def _fmt_variant_description(variant, width, indent):
@@ -441,7 +441,7 @@ def get_url(version):
         return "No URL"

     url = get_url(preferred) if pkg.has_code else ""
-    line = version(" {0}".format(pad(preferred))) + color.cescape(url)
+    line = version(" {0}".format(pad(preferred))) + color.cescape(str(url))
     color.cwrite(line)

     print()
@@ -464,7 +464,7 @@ def get_url(version):
             continue

         for v, url in vers:
-            line = version(" {0}".format(pad(v))) + color.cescape(url)
+            line = version(" {0}".format(pad(v))) + color.cescape(str(url))
             color.cprint(line)


@@ -475,10 +475,7 @@ def print_virtuals(pkg, args):
     color.cprint(section_title("Virtual Packages: "))
     if pkg.provided:
         for when, specs in reversed(sorted(pkg.provided.items())):
-            line = " %s provides %s" % (
-                when.colorized(),
-                ", ".join(s.colorized() for s in specs),
-            )
+            line = " %s provides %s" % (when.cformat(), ", ".join(s.cformat() for s in specs))
             print(line)

     else:
@@ -497,7 +494,9 @@ def print_licenses(pkg, args):
         pad = padder(pkg.licenses, 4)
         for when_spec in pkg.licenses:
             license_identifier = pkg.licenses[when_spec]
-            line = license(" {0}".format(pad(license_identifier))) + color.cescape(when_spec)
+            line = license(" {0}".format(pad(license_identifier))) + color.cescape(
+                str(when_spec)
+            )
             color.cprint(line)
@@ -71,6 +71,11 @@ def setup_parser(subparser):
         help="the number of versions to fetch for each spec, choose 'all' to"
         " retrieve all versions of each package",
     )
+    create_parser.add_argument(
+        "--private",
+        action="store_true",
+        help="for a private mirror, include non-redistributable packages",
+    )
     arguments.add_common_arguments(create_parser, ["specs"])
     arguments.add_concretizer_args(create_parser)

@@ -108,6 +113,11 @@ def setup_parser(subparser):
         "and source use `--type binary --type source` (default)"
         ),
     )
+    add_parser.add_argument(
+        "--autopush",
+        action="store_true",
+        help=("set mirror to push automatically after installation"),
+    )
     add_parser_signed = add_parser.add_mutually_exclusive_group(required=False)
     add_parser_signed.add_argument(
         "--unsigned",
@@ -175,6 +185,21 @@ def setup_parser(subparser):
         ),
     )
     set_parser.add_argument("--url", help="url of mirror directory from 'spack mirror create'")
+    set_parser_autopush = set_parser.add_mutually_exclusive_group(required=False)
+    set_parser_autopush.add_argument(
+        "--autopush",
+        help="set mirror to push automatically after installation",
+        action="store_true",
+        default=None,
+        dest="autopush",
+    )
+    set_parser_autopush.add_argument(
+        "--no-autopush",
+        help="set mirror to not push automatically after installation",
+        action="store_false",
+        default=None,
+        dest="autopush",
+    )
     set_parser_unsigned = set_parser.add_mutually_exclusive_group(required=False)
     set_parser_unsigned.add_argument(
         "--unsigned",
@@ -218,6 +243,7 @@ def mirror_add(args):
         or args.type
         or args.oci_username
         or args.oci_password
+        or args.autopush
         or args.signed is not None
     ):
         connection = {"url": args.url}
@@ -234,6 +260,8 @@ def mirror_add(args):
     if args.type:
         connection["binary"] = "binary" in args.type
         connection["source"] = "source" in args.type
+    if args.autopush:
+        connection["autopush"] = args.autopush
     if args.signed is not None:
         connection["signed"] = args.signed
     mirror = spack.mirror.Mirror(connection, name=args.name)
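The --autopush/--no-autopush pair above is the usual argparse idiom for a tri-state option: both flags write to the same dest, the default None means "leave the setting unchanged", and each flag stores True or False. A minimal standalone demonstration:

import argparse

parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=False)
group.add_argument("--autopush", dest="autopush", action="store_true", default=None)
group.add_argument("--no-autopush", dest="autopush", action="store_false", default=None)

print(parser.parse_args([]).autopush)                 # None -> don't touch the setting
print(parser.parse_args(["--autopush"]).autopush)     # True
print(parser.parse_args(["--no-autopush"]).autopush)  # False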
@@ -270,6 +298,8 @@ def _configure_mirror(args):
         changes["access_pair"] = [args.oci_username, args.oci_password]
     if getattr(args, "signed", None) is not None:
         changes["signed"] = args.signed
+    if getattr(args, "autopush", None) is not None:
+        changes["autopush"] = args.autopush

     # argparse cannot distinguish between --binary and --no-binary when same dest :(
     # notice that set-url does not have these args, so getattr
@@ -334,7 +364,6 @@ def concrete_specs_from_user(args):
     specs = filter_externals(specs)
     specs = list(set(specs))
     specs.sort(key=lambda s: (s.name, s.version))
-    specs, _ = lang.stable_partition(specs, predicate_fn=not_excluded_fn(args))
     return specs


@@ -379,36 +408,50 @@ def concrete_specs_from_cli_or_file(args):
     return specs


-def not_excluded_fn(args):
-    """Return a predicate that evaluate to True if a spec was not explicitly
-    excluded by the user.
-    """
-    exclude_specs = []
-    if args.exclude_file:
-        exclude_specs.extend(specs_from_text_file(args.exclude_file, concretize=False))
-    if args.exclude_specs:
-        exclude_specs.extend(spack.cmd.parse_specs(str(args.exclude_specs).split()))
+class IncludeFilter:
+    def __init__(self, args):
+        self.exclude_specs = []
+        if args.exclude_file:
+            self.exclude_specs.extend(specs_from_text_file(args.exclude_file, concretize=False))
+        if args.exclude_specs:
+            self.exclude_specs.extend(spack.cmd.parse_specs(str(args.exclude_specs).split()))
+        self.private = args.private

-    def not_excluded(x):
-        return not any(x.satisfies(y) for y in exclude_specs)
+    def __call__(self, x):
+        return all([self._not_license_excluded(x), self._not_cmdline_excluded(x)])

-    return not_excluded
+    def _not_license_excluded(self, x):
+        """True if the spec is for a private mirror, or as long as the
+        package does not explicitly forbid redistributing source."""
+        if self.private:
+            return True
+        elif x.package_class.redistribute_source(x):
+            return True
+        else:
+            tty.debug(
+                "Skip adding {0} to mirror: the package.py file"
+                " indicates that a public mirror should not contain"
+                " it.".format(x.name)
+            )
+            return False
+
+    def _not_cmdline_excluded(self, x):
+        """True if a spec was not explicitly excluded by the user."""
+        return not any(x.satisfies(y) for y in self.exclude_specs)


-def concrete_specs_from_environment(selection_fn):
+def concrete_specs_from_environment():
     env = ev.active_environment()
     assert env, "an active environment is required"
     mirror_specs = env.all_specs()
     mirror_specs = filter_externals(mirror_specs)
-    mirror_specs, _ = lang.stable_partition(mirror_specs, predicate_fn=selection_fn)
     return mirror_specs


-def all_specs_with_all_versions(selection_fn):
+def all_specs_with_all_versions():
     specs = [spack.spec.Spec(n) for n in spack.repo.all_package_names()]
     mirror_specs = spack.mirror.get_all_versions(specs)
     mirror_specs.sort(key=lambda s: (s.name, s.version))
-    mirror_specs, _ = lang.stable_partition(mirror_specs, predicate_fn=selection_fn)
     return mirror_specs
@@ -429,12 +472,6 @@ def versions_per_spec(args):
     return num_versions


-def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
-    present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
-    tty.msg("Summary for mirror in {}".format(path))
-    process_mirror_stats(present, mirrored, error)
-
-
 def process_mirror_stats(present, mirrored, error):
     p, m, e = len(present), len(mirrored), len(error)
     tty.msg(
@@ -480,30 +517,28 @@ def mirror_create(args):
     # When no directory is provided, the source dir is used
     path = args.directory or spack.caches.fetch_cache_location()

-    if args.all and not ev.active_environment():
-        create_mirror_for_all_specs(
-            path=path,
-            skip_unstable_versions=args.skip_unstable_versions,
-            selection_fn=not_excluded_fn(args),
-        )
-        return
-
-    if args.all and ev.active_environment():
-        create_mirror_for_all_specs_inside_environment(
-            path=path,
-            skip_unstable_versions=args.skip_unstable_versions,
-            selection_fn=not_excluded_fn(args),
-        )
-        return
-
-    mirror_specs = concrete_specs_from_user(args)
-    create_mirror_for_individual_specs(
-        mirror_specs, path=path, skip_unstable_versions=args.skip_unstable_versions
-    )
+    mirror_specs, mirror_fn = _specs_and_action(args)
+    mirror_fn(mirror_specs, path=path, skip_unstable_versions=args.skip_unstable_versions)
+
+
+def _specs_and_action(args):
+    include_fn = IncludeFilter(args)
+
+    if args.all and not ev.active_environment():
+        mirror_specs = all_specs_with_all_versions()
+        mirror_fn = create_mirror_for_all_specs
+    elif args.all and ev.active_environment():
+        mirror_specs = concrete_specs_from_environment()
+        mirror_fn = create_mirror_for_individual_specs
+    else:
+        mirror_specs = concrete_specs_from_user(args)
+        mirror_fn = create_mirror_for_individual_specs
+
+    mirror_specs, _ = lang.stable_partition(mirror_specs, predicate_fn=include_fn)
+    return mirror_specs, mirror_fn


-def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
-    mirror_specs = all_specs_with_all_versions(selection_fn=selection_fn)
+def create_mirror_for_all_specs(mirror_specs, path, skip_unstable_versions):
     mirror_cache, mirror_stats = spack.mirror.mirror_cache_and_stats(
         path, skip_unstable_versions=skip_unstable_versions
     )
@@ -515,11 +550,10 @@ def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
     process_mirror_stats(*mirror_stats.stats())


-def create_mirror_for_all_specs_inside_environment(path, skip_unstable_versions, selection_fn):
-    mirror_specs = concrete_specs_from_environment(selection_fn=selection_fn)
-    create_mirror_for_individual_specs(
-        mirror_specs, path=path, skip_unstable_versions=skip_unstable_versions
-    )
+def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
+    present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
+    tty.msg("Summary for mirror in {}".format(path))
+    process_mirror_stats(present, mirrored, error)
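The refactor separates "which specs" from "what to do with them": _specs_and_action picks both the list and the handler, and mirror_create simply applies one to the other. A toy sketch of that dispatch shape (all names are illustrative, not Spack's API):

def handle_everything(specs, path, skip_unstable_versions):
    print(f"mirroring all of {specs} -> {path}")

def handle_individually(specs, path, skip_unstable_versions):
    print(f"mirroring selection {specs} -> {path}")

def specs_and_action(all_packages: bool, in_environment: bool):
    # Pick the data and the behavior together, so the caller stays trivial.
    if all_packages and not in_environment:
        return ["every-known-package"], handle_everything
    return ["zlib", "hdf5"], handle_individually

specs, action = specs_and_action(all_packages=False, in_environment=True)
action(specs, path="/tmp/mirror", skip_unstable_versions=False)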

 def mirror_destroy(args):

@@ -22,6 +22,7 @@
 import spack.error
 import spack.spec
 import spack.util.executable
+import spack.util.libc
 import spack.util.module_cmd
 import spack.version
 from spack.util.environment import filter_system_paths
@@ -107,7 +108,6 @@ def _parse_link_paths(string):
     """
     lib_search_paths = False
    raw_link_dirs = []
-    tty.debug("parsing implicit link info")
     for line in string.splitlines():
         if lib_search_paths:
             if line.startswith("\t"):
@@ -122,7 +122,7 @@ def _parse_link_paths(string):
             continue
         if _LINKER_LINE_IGNORE.match(line):
             continue
-        tty.debug("linker line: %s" % line)
+        tty.debug(f"implicit link dirs: link line: {line}")

         next_arg = False
         for arg in line.split():
@@ -138,15 +138,12 @@ def _parse_link_paths(string):
             link_dir_arg = _LINK_DIR_ARG.match(arg)
             if link_dir_arg:
                 link_dir = link_dir_arg.group("dir")
-                tty.debug("linkdir: %s" % link_dir)
                 raw_link_dirs.append(link_dir)

             link_dir_arg = _LIBPATH_ARG.match(arg)
             if link_dir_arg:
                 link_dir = link_dir_arg.group("dir")
-                tty.debug("libpath: %s", link_dir)
                 raw_link_dirs.append(link_dir)
-    tty.debug("found raw link dirs: %s" % ", ".join(raw_link_dirs))

     implicit_link_dirs = list()
     visited = set()
@@ -156,7 +153,7 @@ def _parse_link_paths(string):
             implicit_link_dirs.append(normalized_path)
             visited.add(normalized_path)

-    tty.debug("found link dirs: %s" % ", ".join(implicit_link_dirs))
+    tty.debug(f"implicit link dirs: result: {', '.join(implicit_link_dirs)}")
     return implicit_link_dirs


@@ -417,17 +414,35 @@ def real_version(self):
             self._real_version = self.version
         return self._real_version

-    def implicit_rpaths(self):
+    def implicit_rpaths(self) -> List[str]:
         if self.enable_implicit_rpaths is False:
             return []

-        # Put CXX first since it has the most linking issues
-        # And because it has flags that affect linking
-        link_dirs = self._get_compiler_link_paths()
+        output = self.compiler_verbose_output
+
+        if not output:
+            return []
+
+        link_dirs = _parse_non_system_link_dirs(output)

         all_required_libs = list(self.required_libs) + Compiler._all_compiler_rpath_libraries
         return list(paths_containing_libs(link_dirs, all_required_libs))

+    @property
+    def default_libc(self) -> Optional["spack.spec.Spec"]:
+        """Determine libc targeted by the compiler from link line"""
+        output = self.compiler_verbose_output
+
+        if not output:
+            return None
+
+        dynamic_linker = spack.util.libc.parse_dynamic_linker(output)
+
+        if not dynamic_linker:
+            return None
+
+        return spack.util.libc.libc_from_dynamic_linker(dynamic_linker)
+
     @property
     def required_libs(self):
         """For executables created with this compiler, the compiler libraries
@@ -436,17 +451,17 @@ def required_libs(self):
         # By default every compiler returns the empty list
         return []

-    def _get_compiler_link_paths(self):
+    @property
+    def compiler_verbose_output(self) -> Optional[str]:
+        """Verbose output from compiling a dummy C source file. Output is cached."""
+        if not hasattr(self, "_compile_c_source_output"):
+            self._compile_c_source_output = self._compile_dummy_c_source()
+        return self._compile_c_source_output
+
+    def _compile_dummy_c_source(self) -> Optional[str]:
         cc = self.cc if self.cc else self.cxx
         if not cc or not self.verbose_flag:
-            # Cannot determine implicit link paths without a compiler / verbose flag
-            return []
-
-        # What flag types apply to first_compiler, in what order
-        if cc == self.cc:
-            flags = ["cflags", "cppflags", "ldflags"]
-        else:
-            flags = ["cxxflags", "cppflags", "ldflags"]
+            return None

         try:
             tmpdir = tempfile.mkdtemp(prefix="spack-implicit-link-info")
@@ -458,20 +473,19 @@ def _get_compiler_link_paths(self):
                 "int main(int argc, char* argv[]) { (void)argc; (void)argv; return 0; }\n"
             )
             cc_exe = spack.util.executable.Executable(cc)
-            for flag_type in flags:
+            for flag_type in ["cflags" if cc == self.cc else "cxxflags", "cppflags", "ldflags"]:
                 cc_exe.add_default_arg(*self.flags.get(flag_type, []))

             with self.compiler_environment():
-                output = cc_exe(self.verbose_flag, fin, "-o", fout, output=str, error=str)
-            return _parse_non_system_link_dirs(output)
+                return cc_exe(self.verbose_flag, fin, "-o", fout, output=str, error=str)
         except spack.util.executable.ProcessError as pe:
             tty.debug("ProcessError: Command exited with non-zero status: " + pe.long_message)
-            return []
+            return None
         finally:
             shutil.rmtree(tmpdir, ignore_errors=True)

     @property
-    def verbose_flag(self):
+    def verbose_flag(self) -> Optional[str]:
         """
         This property should be overridden in the compiler subclass if a
         verbose flag is available.
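compiler_verbose_output turns the expensive dummy compilation into a lazily cached property: the first read triggers _compile_dummy_c_source, later reads reuse the stored result, and a None from a failed compilation is cached as well. The pattern in isolation (a sketch, not the actual Spack class):

from typing import Optional

class VerboseOutput:
    def _compile_dummy_c_source(self) -> Optional[str]:
        print("compiling...")          # expensive step, runs at most once
        return "collect2 version ..."  # or None if compilation failed

    @property
    def compiler_verbose_output(self) -> Optional[str]:
        # hasattr-based memoization: even a None result is cached, so a
        # broken compiler is probed only once per object.
        if not hasattr(self, "_output"):
            self._output = self._compile_dummy_c_source()
        return self._output

obj = VerboseOutput()
obj.compiler_verbose_output  # prints "compiling..."
obj.compiler_verbose_output  # cached; no second compile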

@@ -10,6 +10,7 @@
 import itertools
 import multiprocessing.pool
 import os
+import warnings
 from typing import Dict, List, Optional, Tuple

 import archspec.cpu
@@ -109,27 +110,33 @@ def _to_dict(compiler):
     return {"compiler": d}


-def get_compiler_config(scope=None, init_config=False):
+def get_compiler_config(
+    configuration: "spack.config.Configuration",
+    *,
+    scope: Optional[str] = None,
+    init_config: bool = False,
+) -> List[Dict]:
     """Return the compiler configuration for the specified architecture."""
-    config = spack.config.CONFIG.get("compilers", scope=scope) or []
+    config = configuration.get("compilers", scope=scope) or []
     if config or not init_config:
         return config

-    merged_config = spack.config.CONFIG.get("compilers")
+    merged_config = configuration.get("compilers")
     if merged_config:
         # Config is empty for this scope
         # Do not init config because there is a non-empty scope
         return config

-    _init_compiler_config(scope=scope)
-    config = spack.config.CONFIG.get("compilers", scope=scope)
+    _init_compiler_config(configuration, scope=scope)
+    config = configuration.get("compilers", scope=scope)
     return config


-def get_compiler_config_from_packages(scope=None):
+def get_compiler_config_from_packages(
+    configuration: "spack.config.Configuration", *, scope: Optional[str] = None
+) -> List[Dict]:
     """Return the compiler configuration from packages.yaml"""
-    config = spack.config.get("packages", scope=scope)
+    config = configuration.get("packages", scope=scope)
     if not config:
         return []

@@ -216,13 +223,15 @@ def _compiler_config_from_external(config):
     return compiler_entry


-def _init_compiler_config(*, scope):
+def _init_compiler_config(
+    configuration: "spack.config.Configuration", *, scope: Optional[str]
+) -> None:
     """Compiler search used when Spack has no compilers."""
     compilers = find_compilers()
     compilers_dict = []
     for compiler in compilers:
         compilers_dict.append(_to_dict(compiler))
-    spack.config.set("compilers", compilers_dict, scope=scope)
+    configuration.set("compilers", compilers_dict, scope=scope)


 def compiler_config_files():
@@ -233,7 +242,7 @@ def compiler_config_files():
         compiler_config = config.get("compilers", scope=name)
         if compiler_config:
             config_files.append(config.get_config_filename(name, "compilers"))
-        compiler_config_from_packages = get_compiler_config_from_packages(scope=name)
+        compiler_config_from_packages = get_compiler_config_from_packages(config, scope=name)
         if compiler_config_from_packages:
             config_files.append(config.get_config_filename(name, "packages"))
     return config_files
@@ -246,7 +255,9 @@ def add_compilers_to_config(compilers, scope=None):
         compilers: a list of Compiler objects.
         scope: configuration scope to modify.
     """
-    compiler_config = get_compiler_config(scope, init_config=False)
+    compiler_config = get_compiler_config(
+        configuration=spack.config.CONFIG, scope=scope, init_config=False
+    )
     for compiler in compilers:
         if not compiler.cc:
             tty.debug(f"{compiler.spec} does not have a C compiler")
@@ -295,7 +306,9 @@ def _remove_compiler_from_scope(compiler_spec, scope):
         True if one or more compiler entries were actually removed, False otherwise
     """
     assert scope is not None, "a specific scope is needed when calling this function"
-    compiler_config = get_compiler_config(scope, init_config=False)
+    compiler_config = get_compiler_config(
+        configuration=spack.config.CONFIG, scope=scope, init_config=False
+    )
     filtered_compiler_config = [
         compiler_entry
         for compiler_entry in compiler_config
@@ -310,21 +323,28 @@ def _remove_compiler_from_scope(compiler_spec, scope):
     # We need to preserve the YAML type for comments, hence we are copying the
     # items in the list that has just been retrieved
     compiler_config[:] = filtered_compiler_config
-    spack.config.set("compilers", compiler_config, scope=scope)
+    spack.config.CONFIG.set("compilers", compiler_config, scope=scope)
     return True


-def all_compilers_config(scope=None, init_config=True):
+def all_compilers_config(
+    configuration: "spack.config.Configuration",
+    *,
+    scope: Optional[str] = None,
+    init_config: bool = True,
+) -> List["spack.compiler.Compiler"]:
     """Return a set of specs for all the compiler versions currently
     available to build with. These are instances of CompilerSpec.
     """
-    from_packages_yaml = get_compiler_config_from_packages(scope)
+    from_packages_yaml = get_compiler_config_from_packages(configuration, scope=scope)
     if from_packages_yaml:
         init_config = False
-    from_compilers_yaml = get_compiler_config(scope, init_config)
+    from_compilers_yaml = get_compiler_config(configuration, scope=scope, init_config=init_config)

     result = from_compilers_yaml + from_packages_yaml
-    key = lambda c: _compiler_from_config_entry(c["compiler"])
+    # Dedupe entries by the compiler they represent
+    # If the entry is invalid, treat it as unique for deduplication
+    key = lambda c: _compiler_from_config_entry(c["compiler"] or id(c))
     return list(llnl.util.lang.dedupe(result, key=key))


@@ -332,7 +352,7 @@ def all_compiler_specs(scope=None, init_config=True):
     # Return compiler specs from the merged config.
     return [
         spack.spec.parse_with_version_concrete(s["compiler"]["spec"], compiler=True)
-        for s in all_compilers_config(scope, init_config)
+        for s in all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)
     ]


@@ -492,11 +512,20 @@ def find_specs_by_arch(compiler_spec, arch_spec, scope=None, init_config=True):


 def all_compilers(scope=None, init_config=True):
-    config = all_compilers_config(scope, init_config=init_config)
-    compilers = list()
-    for items in config:
+    return all_compilers_from(
+        configuration=spack.config.CONFIG, scope=scope, init_config=init_config
+    )
+
+
+def all_compilers_from(configuration, scope=None, init_config=True):
+    compilers = []
+    for items in all_compilers_config(
+        configuration=configuration, scope=scope, init_config=init_config
+    ):
         items = items["compiler"]
-        compilers.append(_compiler_from_config_entry(items))
+        compiler = _compiler_from_config_entry(items)  # can be None in error case
+        if compiler:
+            compilers.append(compiler)
     return compilers


@@ -507,7 +536,7 @@ def compilers_for_spec(
     """This gets all compilers that satisfy the supplied CompilerSpec.
     Returns an empty list if none are found.
     """
-    config = all_compilers_config(scope, init_config)
+    config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)

     matches = set(find(compiler_spec, scope, init_config))
     compilers = []
@@ -517,7 +546,7 @@ def compilers_for_spec(


 def compilers_for_arch(arch_spec, scope=None):
-    config = all_compilers_config(scope)
+    config = all_compilers_config(spack.config.CONFIG, scope=scope)
     return list(get_compilers(config, arch_spec=arch_spec))


@@ -603,7 +632,10 @@ def _compiler_from_config_entry(items):
     compiler = _compiler_cache.get(config_id, None)

     if compiler is None:
-        compiler = compiler_from_dict(items)
+        try:
+            compiler = compiler_from_dict(items)
+        except UnknownCompilerError as e:
+            warnings.warn(e.message)
         _compiler_cache[config_id] = compiler

     return compiler
@@ -656,7 +688,9 @@ def get_compilers(config, cspec=None, arch_spec=None):
             raise ValueError(msg)
             continue

-        compilers.append(_compiler_from_config_entry(items))
+        compiler = _compiler_from_config_entry(items)
+        if compiler:
+            compilers.append(compiler)

     return compilers

@@ -933,10 +967,11 @@ def _default_make_compilers(cmp_id, paths):
     make_mixed_toolchain(flat_compilers)

     # Finally, create the compiler list
-    compilers = []
+    compilers: List["spack.compiler.Compiler"] = []
     for compiler_id, _, compiler in flat_compilers:
         make_compilers = getattr(compiler_id.os, "make_compilers", _default_make_compilers)
-        compilers.extend(make_compilers(compiler_id, compiler))
+        candidates = make_compilers(compiler_id, compiler)
+        compilers.extend(x for x in candidates if x.cc is not None)

     return compilers
@@ -38,10 +38,10 @@ class Clang(Compiler):
     cxx_names = ["clang++"]

     # Subclasses use possible names of Fortran 77 compiler
-    f77_names = ["flang"]
+    f77_names = ["flang-new", "flang"]

     # Subclasses use possible names of Fortran 90 compiler
-    fc_names = ["flang"]
+    fc_names = ["flang-new", "flang"]

     version_argument = "--version"

@@ -171,10 +171,11 @@ def extract_version_from_output(cls, output):

         match = re.search(
             # Normal clang compiler versions are left as-is
-            r"clang version ([^ )\n]+)-svn[~.\w\d-]*|"
+            r"(?:clang|flang-new) version ([^ )\n]+)-svn[~.\w\d-]*|"
             # Don't include hyphenated patch numbers in the version
             # (see https://github.com/spack/spack/pull/14365 for details)
-            r"clang version ([^ )\n]+?)-[~.\w\d-]*|" r"clang version ([^ )\n]+)",
+            r"(?:clang|flang-new) version ([^ )\n]+?)-[~.\w\d-]*|"
+            r"(?:clang|flang-new) version ([^ )\n]+)",
             output,
         )
         if match:
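The widened regex accepts both clang and flang-new version banners; whichever alternative matches, the version lands in one of the three capture groups. A quick sanity check of the pattern (the banner strings are made up):

import re

pattern = (
    r"(?:clang|flang-new) version ([^ )\n]+)-svn[~.\w\d-]*|"
    r"(?:clang|flang-new) version ([^ )\n]+?)-[~.\w\d-]*|"
    r"(?:clang|flang-new) version ([^ )\n]+)"
)

for banner in ["clang version 17.0.6 (https://github.com/llvm/llvm-project ...)",
               "flang-new version 18.1.0"]:
    match = re.search(pattern, banner)
    version = next(g for g in match.groups() if g is not None)
    print(version)  # 17.0.6, then 18.1.0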

@@ -8,7 +8,7 @@
 import subprocess
 import sys
 import tempfile
-from typing import Dict, List, Set
+from typing import Dict, List

 import archspec.cpu

@@ -20,15 +20,7 @@
 from spack.error import SpackError
 from spack.version import Version, VersionRange

-avail_fc_version: Set[str] = set()
-fc_path: Dict[str, str] = dict()
-
-fortran_mapping = {
-    "2021.3.0": "19.29.30133",
-    "2021.2.1": "19.28.29913",
-    "2021.2.0": "19.28.29334",
-    "2021.1.0": "19.28.29333",
-}
+FC_PATH: Dict[str, str] = dict()


 class CmdCall:
@@ -115,15 +107,13 @@ def command_str(self):
         return f"{script} {self.arch} {self.sdk_ver} {self.vcvars_ver}"


-def get_valid_fortran_pth(comp_ver):
-    cl_ver = str(comp_ver)
+def get_valid_fortran_pth():
+    """Assign maximum available fortran compiler version"""
+    # TODO (johnwparent): validate compatibility w/ try compiler
+    # functionality when added
     sort_fn = lambda fc_ver: Version(fc_ver)
-    sort_fc_ver = sorted(list(avail_fc_version), key=sort_fn)
-    for ver in sort_fc_ver:
-        if ver in fortran_mapping:
-            if Version(cl_ver) <= Version(fortran_mapping[ver]):
-                return fc_path[ver]
-    return None
+    sort_fc_ver = sorted(list(FC_PATH.keys()), key=sort_fn)
+    return FC_PATH[sort_fc_ver[-1]] if sort_fc_ver else None
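Picking the newest Fortran compiler relies on sorting version strings with a parsed-version key; plain string sorting would order "2021.10.0" before "2021.3.0". A sketch using packaging.version as a stand-in for spack.version.Version (the paths are hypothetical):

from packaging.version import Version  # stand-in for spack.version.Version

fc_path = {
    "2021.3.0": r"C:\oneAPI\2021.3.0\ifx.exe",
    "2021.10.0": r"C:\oneAPI\2021.10.0\ifx.exe",
}

newest = sorted(fc_path.keys(), key=Version)[-1]
print(newest)           # 2021.10.0 (lexical sort would have picked 2021.3.0)
print(fc_path[newest])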

 class Msvc(Compiler):
@@ -167,11 +157,9 @@ def __init__(self, *args, **kwargs):
         # This positional argument "paths" is later parsed and processed by the base class
         # via the call to `super` later in this method
         paths = args[3]
-        # This positional argument "cspec" is also parsed and handled by the base class
-        # constructor
-        cspec = args[0]
-        new_pth = [pth if pth else get_valid_fortran_pth(cspec.version) for pth in paths]
-        paths[:] = new_pth
+        latest_fc = get_valid_fortran_pth()
+        new_pth = [pth if pth else latest_fc for pth in paths[2:]]
+        paths[2:] = new_pth
         # Initialize, deferring to base class but then adding the vcvarsallfile
         # file based on compiler executable path.
         super().__init__(*args, **kwargs)
@@ -183,7 +171,7 @@ def __init__(self, *args, **kwargs):
         # and stores their path, but their respective VCVARS
         # file must be invoked before usage.
         env_cmds = []
-        compiler_root = os.path.join(self.cc, "../../../../../../..")
+        compiler_root = os.path.join(os.path.dirname(self.cc), "../../../../../..")
         vcvars_script_path = os.path.join(compiler_root, "Auxiliary", "Build", "vcvars64.bat")
         # get current platform architecture and format for vcvars argument
         arch = spack.platforms.real_host().default.lower()
@@ -198,11 +186,34 @@ def __init__(self, *args, **kwargs):
         # paths[2] refers to the fc path and is a generic check
         # for a fortran compiler
         if paths[2]:
+
+            def get_oneapi_root(pth: str):
+                """From within a prefix known to be a oneAPI path
+                determine the oneAPI root path from arbitrary point
+                under root
+
+                Args:
+                    pth: path prefixed within oneAPI root
+                """
+                if not pth:
+                    return ""
+                while os.path.basename(pth) and os.path.basename(pth) != "oneAPI":
+                    pth = os.path.dirname(pth)
+                return pth
+
             # If this is found, it sets all the vars
-            oneapi_root = os.path.join(self.cc, "../../..")
+            oneapi_root = get_oneapi_root(self.fc)
+            if not oneapi_root:
+                raise RuntimeError(f"Non-oneAPI Fortran compiler {self.fc} assigned to MSVC")
             oneapi_root_setvars = os.path.join(oneapi_root, "setvars.bat")
+            # some oneAPI exes return a version more precise than their
+            # install paths specify, so we determine path from
+            # the install path rather than the fc executable itself
+            numver = r"\d+\.\d+(?:\.\d+)?"
+            pattern = f"((?:{numver})|(?:latest))"
+            version_from_path = re.search(pattern, self.fc).group(1)
             oneapi_version_setvars = os.path.join(
-                oneapi_root, "compiler", str(self.ifx_version), "env", "vars.bat"
+                oneapi_root, "compiler", version_from_path, "env", "vars.bat"
             )
             # order matters here, the specific version env must be invoked first,
             # otherwise it will be ignored if the root setvars sets up the oneapi
@@ -314,23 +325,19 @@ def setup_custom_environment(self, pkg, env):

     @classmethod
     def fc_version(cls, fc):
-        # We're using intel for the Fortran compilers, which exist if
-        # ONEAPI_ROOT is a meaningful variable
         if not sys.platform == "win32":
             return "unknown"
         fc_ver = cls.default_version(fc)
-        avail_fc_version.add(fc_ver)
-        fc_path[fc_ver] = fc
-        if os.getenv("ONEAPI_ROOT"):
-            try:
-                sps = spack.operating_systems.windows_os.WindowsOs().compiler_search_paths
-            except AttributeError:
-                raise SpackError("Windows compiler search paths not established")
-            clp = spack.util.executable.which_string("cl", path=sps)
-            ver = cls.default_version(clp)
-        else:
-            ver = fc_ver
-        return ver
+        FC_PATH[fc_ver] = fc
+        try:
+            sps = spack.operating_systems.windows_os.WindowsOs().compiler_search_paths
+        except AttributeError:
+            raise SpackError(
+                "Windows compiler search paths not established, "
+                "please report this behavior to github.com/spack/spack"
+            )
+        clp = spack.util.executable.which_string("cl", path=sps)
+        return cls.default_version(clp) if clp else fc_ver

     @classmethod
     def f77_version(cls, f77):

@@ -64,7 +64,7 @@ def verbose_flag(self):
         #
         # This way, we at least enable the implicit rpath detection, which is
         # based on compilation of a C file (see method
-        # spack.compiler._get_compiler_link_paths): in the case of a mixed
+        # spack.compiler._compile_dummy_c_source): in the case of a mixed
         # NAG/GCC toolchain, the flag will be passed to g++ (e.g.
         # 'g++ -Wl,-v ./main.c'), otherwise, the flag will be passed to nagfor
         # (e.g. 'nagfor -Wl,-v ./main.c' - note that nagfor recognizes '.c'

@@ -1562,8 +1562,9 @@ def ensure_latest_format_fn(section: str) -> Callable[[YamlConfigDict], bool]:
 def use_configuration(
     *scopes_or_paths: Union[ConfigScope, str]
 ) -> Generator[Configuration, None, None]:
-    """Use the configuration scopes passed as arguments within the
-    context manager.
+    """Use the configuration scopes passed as arguments within the context manager.
+
+    This function invalidates caches, and is therefore very slow.

     Args:
         *scopes_or_paths: scope objects or paths to be used
@@ -12,9 +12,31 @@
         },
         "os_package_manager": "yum_amazon"
     },
+    "fedora:40": {
+        "bootstrap": {
+            "template": "container/fedora.dockerfile",
+            "image": "docker.io/fedora:40"
+        },
+        "os_package_manager": "dnf",
+        "build": "spack/fedora40",
+        "final": {
+            "image": "docker.io/fedora:40"
+        }
+    },
+    "fedora:39": {
+        "bootstrap": {
+            "template": "container/fedora.dockerfile",
+            "image": "docker.io/fedora:39"
+        },
+        "os_package_manager": "dnf",
+        "build": "spack/fedora39",
+        "final": {
+            "image": "docker.io/fedora:39"
+        }
+    },
     "fedora:38": {
         "bootstrap": {
-            "template": "container/fedora_38.dockerfile",
+            "template": "container/fedora.dockerfile",
             "image": "docker.io/fedora:38"
         },
         "os_package_manager": "dnf",
@@ -25,7 +47,7 @@
     },
     "fedora:37": {
         "bootstrap": {
-            "template": "container/fedora_37.dockerfile",
+            "template": "container/fedora.dockerfile",
             "image": "docker.io/fedora:37"
         },
         "os_package_manager": "dnf",
@@ -25,6 +25,7 @@
import socket
import sys
import time
+from json import JSONDecoder
from typing import (
    Any,
    Callable,
@@ -818,7 +819,8 @@ def _read_from_file(self, filename):
        """
        try:
            with open(filename, "r") as f:
-               fdata = sjson.load(f)
+               # In the future we may use a stream of JSON objects, hence `raw_decode` for compat.
+               fdata, _ = JSONDecoder().raw_decode(f.read())
        except Exception as e:
            raise CorruptDatabaseError("error parsing database:", str(e)) from e

@@ -833,27 +835,24 @@ def check(cond, msg):

        # High-level file checks
        db = fdata["database"]
-       check("installs" in db, "no 'installs' in JSON DB.")
        check("version" in db, "no 'version' in JSON DB.")

-       installs = db["installs"]

        # TODO: better version checking semantics.
        version = vn.Version(db["version"])
        if version > _DB_VERSION:
            raise InvalidDatabaseVersionError(self, _DB_VERSION, version)
-       elif version < _DB_VERSION:
-           if not any(old == version and new == _DB_VERSION for old, new in _SKIP_REINDEX):
-               tty.warn(
-                   "Spack database version changed from %s to %s. Upgrading."
-                   % (version, _DB_VERSION)
-               )
+       elif version < _DB_VERSION and not any(
+           old == version and new == _DB_VERSION for old, new in _SKIP_REINDEX
+       ):
+           tty.warn(f"Spack database version changed from {version} to {_DB_VERSION}. Upgrading.")

-           self.reindex(spack.store.STORE.layout)
-           installs = dict(
-               (k, v.to_dict(include_fields=self._record_fields))
-               for k, v in self._data.items()
-           )
+           self.reindex(spack.store.STORE.layout)
+           installs = dict(
+               (k, v.to_dict(include_fields=self._record_fields)) for k, v in self._data.items()
+           )
+       else:
+           check("installs" in db, "no 'installs' in JSON DB.")
+           installs = db["installs"]

        spec_reader = reader(version)
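
An editorial aside on the `raw_decode` change above: it parses the first JSON object in the file and ignores trailing data, which keeps a future "stream of JSON objects" format readable by this reader. A self-contained sketch:

    from json import JSONDecoder

    text = '{"database": {"version": "7"}}\n{"future": "extra object"}'
    first, end = JSONDecoder().raw_decode(text)
    print(first["database"]["version"])  # -> 7
    print(text[end:].strip())            # unparsed remainder, ignored by the reader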
@@ -83,26 +83,15 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
    return path_to_dict(search_paths)


-def get_elf_compat(path):
-   """For ELF files, get a triplet (EI_CLASS, EI_DATA, e_machine) and see if
-   it is host-compatible."""
-   # On ELF platforms supporting, we try to be a bit smarter when it comes to shared
-   # libraries, by dropping those that are not host compatible.
-   with open(path, "rb") as f:
-       elf = elf_utils.parse_elf(f, only_header=True)
-       return (elf.is_64_bit, elf.is_little_endian, elf.elf_hdr.e_machine)


def accept_elf(path, host_compat):
-   """Accept an ELF file if the header matches the given compat triplet,
-   obtained with :py:func:`get_elf_compat`. In case it's not an ELF (e.g.
-   static library, or some arbitrary file, fall back to is_readable_file)."""
+   """Accept an ELF file if the header matches the given compat triplet. In case it's not an ELF
+   (e.g. static library, or some arbitrary file, fall back to is_readable_file)."""
    # Fast path: assume libraries at least have .so in their basename.
    # Note: don't replace with splitext, because of libsmth.so.1.2.3 file names.
    if ".so" not in os.path.basename(path):
        return llnl.util.filesystem.is_readable_file(path)
    try:
-       return host_compat == get_elf_compat(path)
+       return host_compat == elf_utils.get_elf_compat(path)
    except (OSError, elf_utils.ElfParsingError):
        return llnl.util.filesystem.is_readable_file(path)

@@ -155,7 +144,7 @@ def libraries_in_ld_and_system_library_path(
    search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier))

    try:
-       host_compat = get_elf_compat(sys.executable)
+       host_compat = elf_utils.get_elf_compat(sys.executable)
        accept = lambda path: accept_elf(path, host_compat)
    except (OSError, elf_utils.ElfParsingError):
        accept = llnl.util.filesystem.is_readable_file
@@ -11,6 +11,7 @@

from llnl.util import filesystem

+import spack.platforms
import spack.repo
import spack.spec
from spack.util import spack_yaml
@@ -32,6 +33,8 @@ class ExpectedTestResult(NamedTuple):

    #: Spec to be detected
    spec: str
+   #: Attributes expected in the external spec
+   extra_attributes: Dict[str, str]


class DetectionTest(NamedTuple):
@@ -100,7 +103,10 @@ def _create_executable_scripts(self, mock_executables: MockExecutables) -> List[

    @property
    def expected_specs(self) -> List[spack.spec.Spec]:
-       return [spack.spec.Spec(r.spec) for r in self.test.results]
+       return [
+           spack.spec.Spec.from_detection(item.spec, extra_attributes=item.extra_attributes)
+           for item in self.test.results
+       ]


def detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> List[Runner]:
@@ -117,9 +123,13 @@ def detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> List[Runn
    """
    result = []
    detection_tests_content = read_detection_tests(pkg_name, repository)
+   current_platform = str(spack.platforms.host())

    tests_by_path = detection_tests_content.get("paths", [])
    for single_test_data in tests_by_path:
+       if current_platform not in single_test_data.get("platforms", [current_platform]):
+           continue
+
        mock_executables = []
        for layout in single_test_data["layout"]:
            mock_executables.append(
@@ -127,7 +137,11 @@ def detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> List[Runn
            )
        expected_results = []
        for assertion in single_test_data["results"]:
-           expected_results.append(ExpectedTestResult(spec=assertion["spec"]))
+           expected_results.append(
+               ExpectedTestResult(
+                   spec=assertion["spec"], extra_attributes=assertion.get("extra_attributes", {})
+               )
+           )

        current_test = DetectionTest(
            pkg_name=pkg_name, layout=mock_executables, results=expected_results
@@ -27,6 +27,7 @@ class OpenMpi(Package):
* ``variant``
* ``version``
* ``requires``
+* ``redistribute``

"""
import collections
@@ -63,6 +64,7 @@ class OpenMpi(Package):
__all__ = [
    "DirectiveError",
    "DirectiveMeta",
+   "DisableRedistribute",
    "version",
    "conflicts",
    "depends_on",
@@ -75,6 +77,7 @@ class OpenMpi(Package):
    "resource",
    "build_system",
    "requires",
+   "redistribute",
]

#: These are variant names used by Spack internally; packages can't use them
@@ -598,6 +601,64 @@ def _execute_depends_on(pkg: "spack.package_base.PackageBase"):
    return _execute_depends_on


+#: Store whether a given Spec source/binary should not be redistributed.
+class DisableRedistribute:
+    def __init__(self, source, binary):
+        self.source = source
+        self.binary = binary
+
+
+@directive("disable_redistribute")
+def redistribute(source=None, binary=None, when: WhenType = None):
+    """Can be used inside a Package definition to declare that
+    the package source and/or compiled binaries should not be
+    redistributed.
+
+    By default, Packages allow source/binary distribution (i.e. in
+    mirrors). Because of this, and because overlapping enable/
+    disable specs are not allowed, this directive only allows users
+    to explicitly disable redistribution for specs.
+    """
+
+    return lambda pkg: _execute_redistribute(pkg, source, binary, when)
+
+
+def _execute_redistribute(
+    pkg: "spack.package_base.PackageBase", source=None, binary=None, when: WhenType = None
+):
+    if source is None and binary is None:
+        return
+    elif (source is True) or (binary is True):
+        raise DirectiveError(
+            "Source/binary distribution are true by default, they can only "
+            "be explicitly disabled."
+        )
+
+    if source is None:
+        source = True
+    if binary is None:
+        binary = True
+
+    when_spec = _make_when_spec(when)
+    if not when_spec:
+        return
+    if source is False:
+        max_constraint = spack.spec.Spec(f"{pkg.name}@{when_spec.versions}")
+        if not max_constraint.satisfies(when_spec):
+            raise DirectiveError("Source distribution can only be disabled for versions")
+
+    if when_spec in pkg.disable_redistribute:
+        disable = pkg.disable_redistribute[when_spec]
+        if not source:
+            disable.source = True
+        if not binary:
+            disable.binary = True
+    else:
+        pkg.disable_redistribute[when_spec] = DisableRedistribute(
+            source=not source, binary=not binary
+        )
+
+
@directive(("extendees", "dependencies"))
def extends(spec, when=None, type=("build", "run"), patches=None):
    """Same as depends_on, but also adds this package to the extendee list.
@@ -106,17 +106,16 @@ def environment_name(path: Union[str, pathlib.Path]) -> str:
    return path_str


-def check_disallowed_env_config_mods(scopes):
+def ensure_no_disallowed_env_config_mods(scopes: List[spack.config.ConfigScope]) -> None:
    for scope in scopes:
-       with spack.config.use_configuration(scope):
-           if spack.config.get("config:environments_root"):
-               raise SpackEnvironmentError(
-                   "Spack environments are prohibited from modifying 'config:environments_root' "
-                   "because it can make the definition of the environment ill-posed. Please "
-                   "remove from your environment and place it in a permanent scope such as "
-                   "defaults, system, site, etc."
-               )
-   return scopes
+       config = scope.get_section("config")
+       if config and "environments_root" in config["config"]:
+           raise SpackEnvironmentError(
+               "Spack environments are prohibited from modifying 'config:environments_root' "
+               "because it can make the definition of the environment ill-posed. Please "
+               "remove from your environment and place it in a permanent scope such as "
+               "defaults, system, site, etc."
+           )


def default_manifest_yaml():
@@ -1427,7 +1426,7 @@ def _concretize_separately(self, tests=False):

        # Ensure we have compilers in compilers.yaml to avoid that
        # processes try to write the config file in parallel
-       _ = spack.compilers.get_compiler_config(init_config=True)
+       _ = spack.compilers.get_compiler_config(spack.config.CONFIG, init_config=True)

        # Early return if there is nothing to do
        if len(args) == 0:
@@ -2463,6 +2462,10 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str]) -> None:
        self.scope_name = f"env:{environment_name(self.manifest_dir)}"
        self.config_stage_dir = os.path.join(env_subdir_path(manifest_dir), "config")

+       #: Configuration scopes associated with this environment. Note that these are not
+       #: invalidated by a re-read of the manifest file.
+       self._config_scopes: Optional[List[spack.config.ConfigScope]] = None
+
        if not self.manifest_file.exists():
            msg = f"cannot find '{manifest_name}' in {self.manifest_dir}"
            raise SpackEnvironmentError(msg)
@@ -2808,16 +2811,19 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:

    @property
    def env_config_scopes(self) -> List[spack.config.ConfigScope]:
-       """A list of all configuration scopes for the environment manifest.
-
-       Returns: All configuration scopes associated with the environment
-       """
-       config_name = self.scope_name
-       env_scope = spack.config.SingleFileScope(
-           config_name, str(self.manifest_file), spack.schema.env.schema, [TOP_LEVEL_KEY]
-       )
-
-       return check_disallowed_env_config_mods(self.included_config_scopes + [env_scope])
+       """A list of all configuration scopes for the environment manifest. On the first call this
+       instantiates all the scopes, on subsequent calls it returns the cached list."""
+       if self._config_scopes is not None:
+           return self._config_scopes
+       scopes: List[spack.config.ConfigScope] = [
+           *self.included_config_scopes,
+           spack.config.SingleFileScope(
+               self.scope_name, str(self.manifest_file), spack.schema.env.schema, [TOP_LEVEL_KEY]
+           ),
+       ]
+       ensure_no_disallowed_env_config_mods(scopes)
+       self._config_scopes = scopes
+       return scopes

    def prepare_config_scope(self) -> None:
        """Add the manifest's scopes to the global configuration search path."""
@@ -662,9 +662,6 @@ def add_specs(self, *specs: spack.spec.Spec) -> None:
            return

        # Drop externals
-       for s in specs:
-           if s.external:
-               tty.warn("Skipping external package: " + s.short_spec)
        specs = [s for s in specs if not s.external]

        self._sanity_check_view_projection(specs)

lib/spack/spack/hooks/autopush.py — Normal file (27 lines)
@@ -0,0 +1,27 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import llnl.util.tty as tty

import spack.binary_distribution as bindist
import spack.mirror


def post_install(spec, explicit):
    # Push package to all buildcaches with autopush==True

    # Do nothing if package was not installed from source
    pkg = spec.package
    if pkg.installed_from_binary_cache:
        return

    # Push the package to all autopush mirrors
    for mirror in spack.mirror.MirrorCollection(binary=True, autopush=True).values():
        bindist.push_or_raise(
            spec,
            mirror.push_url,
            bindist.PushOptions(force=True, regenerate_index=False, unsigned=not mirror.signed),
        )
        tty.msg(f"{spec.name}: Pushed to build cache: '{mirror.name}'")
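
An editorial sketch of a mirrors.yaml entry that the new "autopush" schema key (added further below in this diff) would accept, written as the equivalent Python dict; the mirror name and URL are illustrative:

    mirrors = {
        "local-cache": {
            "url": "file:///tmp/buildcache",
            "binary": True,
            "signed": False,
            "autopush": True,
        }
    }

After every successful from-source install, the post_install hook above selects exactly these mirrors via MirrorCollection(binary=True, autopush=True) and pushes the freshly built binary.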
@@ -489,6 +489,9 @@ def _process_binary_cache_tarball(
    with timer.measure("install"), spack.util.path.filter_padding():
        binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)

+       if hasattr(pkg, "_post_buildcache_install_hook"):
+           pkg._post_buildcache_install_hook()
+
        pkg.installed_from_binary_cache = True
        spack.store.STORE.db.add(pkg.spec, spack.store.STORE.layout, explicit=explicit)
        return True
@@ -976,7 +979,11 @@ def __init__(
        # a dependency of the build task. Here we add it to self.dependencies
        compiler_spec = self.pkg.spec.compiler
        arch_spec = self.pkg.spec.architecture
-       if not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec):
+       strict = spack.concretize.Concretizer().check_for_compiler_existence
+       if (
+           not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec)
+           and not strict
+       ):
            # The compiler is in the queue, identify it as dependency
            dep = spack.compilers.pkg_spec_for_compiler(compiler_spec)
            dep.constrain(f"platform={str(arch_spec.platform)}")
@@ -137,6 +137,12 @@ def source(self):
    def signed(self) -> bool:
        return isinstance(self._data, str) or self._data.get("signed", True)

+   @property
+   def autopush(self) -> bool:
+       if isinstance(self._data, str):
+           return False
+       return self._data.get("autopush", False)
+
    @property
    def fetch_url(self):
        """Get the valid, canonicalized fetch URL"""
@@ -150,7 +156,7 @@ def push_url(self):
    def _update_connection_dict(self, current_data: dict, new_data: dict, top_level: bool):
        keys = ["url", "access_pair", "access_token", "profile", "endpoint_url"]
        if top_level:
-           keys += ["binary", "source", "signed"]
+           keys += ["binary", "source", "signed", "autopush"]
        changed = False
        for key in keys:
            if key in new_data and current_data.get(key) != new_data[key]:
@@ -286,6 +292,7 @@ def __init__(
        scope=None,
        binary: Optional[bool] = None,
        source: Optional[bool] = None,
+       autopush: Optional[bool] = None,
    ):
        """Initialize a mirror collection.

@@ -297,21 +304,27 @@
            If None, do not filter on binary mirrors.
        source: If True, only include source mirrors.
            If False, omit source mirrors.
-           If None, do not filter on source mirrors."""
-       self._mirrors = {
-           name: Mirror(data=mirror, name=name)
-           for name, mirror in (
-               mirrors.items()
-               if mirrors is not None
-               else spack.config.get("mirrors", scope=scope).items()
-           )
-       }
+           If None, do not filter on source mirrors.
+       autopush: If True, only include mirrors that have autopush enabled.
+           If False, omit mirrors that have autopush enabled.
+           If None, do not filter on autopush."""
+       mirrors_data = (
+           mirrors.items()
+           if mirrors is not None
+           else spack.config.get("mirrors", scope=scope).items()
+       )
+       mirrors = (Mirror(data=mirror, name=name) for name, mirror in mirrors_data)

-       if source is not None:
-           self._mirrors = {k: v for k, v in self._mirrors.items() if v.source == source}
+       def _filter(m: Mirror):
+           if source is not None and m.source != source:
+               return False
+           if binary is not None and m.binary != binary:
+               return False
+           if autopush is not None and m.autopush != autopush:
+               return False
+           return True

-       if binary is not None:
-           self._mirrors = {k: v for k, v in self._mirrors.items() if v.binary == binary}
+       self._mirrors = {m.name: m for m in mirrors if _filter(m)}

    def __eq__(self, other):
        return self._mirrors == other._mirrors
@@ -83,6 +83,17 @@ def configuration(module_set_name):
    )


+_FORMAT_STRING_RE = re.compile(r"({[^}]*})")
+
+
+def _format_env_var_name(spec, var_name_fmt):
+    """Format the variable name, but uppercase any formatted fields."""
+    fmt_parts = _FORMAT_STRING_RE.split(var_name_fmt)
+    return "".join(
+        spec.format(part).upper() if _FORMAT_STRING_RE.match(part) else part for part in fmt_parts
+    )
+
+
def _check_tokens_are_valid(format_string, message):
    """Checks that the tokens used in the format string are valid in
    the context of module file and environment variable naming.
@@ -737,20 +748,12 @@ def environment_modifications(self):
        exclude = self.conf.exclude_env_vars

        # We may have tokens to substitute in environment commands

-       # Prepare a suitable transformation dictionary for the names
-       # of the environment variables. This means turn the valid
-       # tokens uppercase.
-       transform = {}
-       for token in _valid_tokens:
-           transform[token] = lambda s, string: str.upper(string)
-
        for x in env:
            # Ensure all the tokens are valid in this context
            msg = "some tokens cannot be expanded in an environment variable name"

            _check_tokens_are_valid(x.name, message=msg)
-           # Transform them
-           x.name = self.spec.format(x.name, transform=transform)
+           x.name = _format_env_var_name(self.spec, x.name)
            if self.modification_needs_formatting(x):
                try:
                    # Not every command has a value
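
A runnable sketch of what `_format_env_var_name` does (the fake spec below stands in for spack.spec.Spec):

    import re

    _FORMAT_STRING_RE = re.compile(r"({[^}]*})")

    class FakeSpec:
        def format(self, s):
            return s.format(name="zlib", version="1.3")

    spec = FakeSpec()
    parts = _FORMAT_STRING_RE.split("{name}_{version}_suffix")
    # split() keeps the "{...}" delimiters, so only formatted fields get uppercased
    print("".join(spec.format(p).upper() if _FORMAT_STRING_RE.match(p) else p for p in parts))
    # -> ZLIB_1.3_suffix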
@@ -73,17 +73,24 @@ def vs_install_paths(self):
    def msvc_paths(self):
        return [os.path.join(path, "VC", "Tools", "MSVC") for path in self.vs_install_paths]

+   @property
+   def oneapi_root(self):
+       root = os.environ.get("ONEAPI_ROOT", "") or os.path.join(
+           os.environ.get("ProgramFiles(x86)", ""), "Intel", "oneAPI"
+       )
+       if os.path.exists(root):
+           return root
+
    @property
    def compiler_search_paths(self):
        # First Strategy: Find MSVC directories using vswhere
        _compiler_search_paths = []
        for p in self.msvc_paths:
            _compiler_search_paths.extend(glob.glob(os.path.join(p, "*", "bin", "Hostx64", "x64")))
-       if os.getenv("ONEAPI_ROOT"):
+       oneapi_root = self.oneapi_root
+       if oneapi_root:
            _compiler_search_paths.extend(
-               glob.glob(
-                   os.path.join(str(os.getenv("ONEAPI_ROOT")), "compiler", "*", "windows", "bin")
-               )
+               glob.glob(os.path.join(oneapi_root, "compiler", "**", "bin"), recursive=True)
            )

        # Second strategy: Find MSVC via the registry
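
The switch from a fixed "*/windows/bin" pattern to a recursive "**" glob is the interesting design choice here: `**` with recursive=True matches any number of directory levels, so both older (compiler/<version>/windows/bin) and newer (compiler/<version>/bin) oneAPI layouts are found. A sketch with an illustrative root:

    import glob
    import os

    oneapi_root = r"C:\Program Files (x86)\Intel\oneAPI"  # illustrative path
    pattern = os.path.join(oneapi_root, "compiler", "**", "bin")
    bin_dirs = glob.glob(pattern, recursive=True)  # matches "bin" at any depth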
@@ -468,7 +468,41 @@ def _names(when_indexed_dictionary):
    return sorted(all_names)


-class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
+class RedistributionMixin:
+    """Logic for determining whether a Package is source/binary
+    redistributable.
+    """
+
+    #: Store whether a given Spec source/binary should not be
+    #: redistributed.
+    disable_redistribute: Dict["spack.spec.Spec", "spack.directives.DisableRedistribute"]
+
+    # Source redistribution must be determined before concretization
+    # (because source mirrors work with un-concretized Specs).
+    @classmethod
+    def redistribute_source(cls, spec):
+        """Whether it should be possible to add the source of this
+        package to a Spack mirror.
+        """
+        for when_spec, disable_redistribute in cls.disable_redistribute.items():
+            if disable_redistribute.source and spec.satisfies(when_spec):
+                return False
+
+        return True
+
+    @property
+    def redistribute_binary(self):
+        """Whether it should be possible to create a binary out of an
+        installed instance of this package.
+        """
+        for when_spec, disable_redistribute in self.__class__.disable_redistribute.items():
+            if disable_redistribute.binary and self.spec.satisfies(when_spec):
+                return False
+
+        return True
+
+
+class PackageBase(WindowsRPath, PackageViewMixin, RedistributionMixin, metaclass=PackageMeta):
    """This is the superclass for all spack packages.

    ***The Package class***
@@ -726,14 +726,14 @@ def first_repo(self):
        """Get the first repo in precedence order."""
        return self.repos[0] if self.repos else None

+   @llnl.util.lang.memoized
+   def _all_package_names_set(self, include_virtuals):
+       return {name for repo in self.repos for name in repo.all_package_names(include_virtuals)}
+
    @llnl.util.lang.memoized
    def _all_package_names(self, include_virtuals):
        """Return all unique package names in all repositories."""
-       all_pkgs = set()
-       for repo in self.repos:
-           for name in repo.all_package_names(include_virtuals):
-               all_pkgs.add(name)
-       return sorted(all_pkgs, key=lambda n: n.lower())
+       return sorted(self._all_package_names_set(include_virtuals), key=lambda n: n.lower())

    def all_package_names(self, include_virtuals=False):
        return self._all_package_names(include_virtuals)
@@ -794,7 +794,11 @@ def patch_index(self):

    @autospec
    def providers_for(self, vpkg_spec):
-       providers = self.provider_index.providers_for(vpkg_spec)
+       providers = [
+           spec
+           for spec in self.provider_index.providers_for(vpkg_spec)
+           if spec.name in self._all_package_names_set(include_virtuals=False)
+       ]
        if not providers:
            raise UnknownPackageError(vpkg_spec.fullname)
        return providers
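
An editorial sketch of the caching pattern above, with functools.lru_cache standing in for llnl.util.lang.memoized and a stub in place of real repositories:

    import functools

    class RepoPathSketch:
        def __init__(self, repos):
            self.repos = tuple(repos)

        @functools.lru_cache(maxsize=None)  # stand-in for @llnl.util.lang.memoized
        def _all_package_names_set(self, include_virtuals):
            return {n for repo in self.repos for n in repo.all_package_names(include_virtuals)}

The expensive union over all repositories is computed once; `providers_for` then pays only an O(1) set-membership test per candidate provider instead of rebuilding name lists on every call.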
@@ -46,6 +46,7 @@
        "signed": {"type": "boolean"},
        "fetch": fetch_and_push,
        "push": fetch_and_push,
+       "autopush": {"type": "boolean"},
        **connection,  # type: ignore
    },
}
@@ -15,6 +15,7 @@
import types
import typing
import warnings
+from contextlib import contextmanager
from typing import Callable, Dict, Iterator, List, NamedTuple, Optional, Set, Tuple, Type, Union

import archspec.cpu
@@ -40,6 +41,8 @@
import spack.spec
import spack.store
import spack.util.crypto
+import spack.util.elf
+import spack.util.libc
import spack.util.path
import spack.util.timer
import spack.variant
@@ -119,6 +122,17 @@ def __str__(self):
        return f"{self._name_.lower()}"


+@contextmanager
+def spec_with_name(spec, name):
+    """Context manager to temporarily set the name of a spec"""
+    old_name = spec.name
+    spec.name = name
+    try:
+        yield spec
+    finally:
+        spec.name = old_name
+
+
class RequirementKind(enum.Enum):
    """Purpose / provenance of a requirement"""
@@ -267,8 +281,36 @@ def _create_counter(specs: List[spack.spec.Spec], tests: bool):
    return NoDuplicatesCounter(specs, tests=tests)


-def all_compilers_in_config():
-   return spack.compilers.all_compilers()
+def all_compilers_in_config(configuration):
+   return spack.compilers.all_compilers_from(configuration)
+
+
+def all_libcs() -> Set[spack.spec.Spec]:
+   """Return a set of all libc specs targeted by any configured compiler. If none, fall back to
+   libc determined from the current Python process if dynamically linked."""
+
+   libcs = {
+       c.default_libc for c in all_compilers_in_config(spack.config.CONFIG) if c.default_libc
+   }
+
+   if libcs:
+       return libcs
+
+   libc = spack.util.libc.libc_from_current_python_process()
+   return {libc} if libc else set()
+
+
+def libc_is_compatible(lhs: spack.spec.Spec, rhs: spack.spec.Spec) -> List[spack.spec.Spec]:
+   return (
+       lhs.name == rhs.name
+       and lhs.external_path == rhs.external_path
+       and lhs.version >= rhs.version
+   )
+
+
+def using_libc_compatibility() -> bool:
+   """Returns True if we are currently using libc compatibility"""
+   return spack.platforms.host().name == "linux"
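
Reading `libc_is_compatible`: a host libc can satisfy a requirement when it is the same libc installation (same name and external path) and at least as new, since glibc is backward compatible. A sketch with illustrative values:

    # (name, external_path, version) triples; versions as tuples for comparison
    host_libc = ("glibc", "/usr", (2, 35))
    built_against = ("glibc", "/usr", (2, 31))

    def compatible(lhs, rhs):
        return lhs[0] == rhs[0] and lhs[1] == rhs[1] and lhs[2] >= rhs[2]

    print(compatible(host_libc, built_against))  # True: a newer host libc is fine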
def extend_flag_list(flag_list, new_flags):
@@ -554,6 +596,23 @@ def _spec_with_default_name(spec_str, name):
    return spec


+def _external_config_with_implicit_externals(configuration):
+    # Read packages.yaml and normalize it, so that it will not contain entries referring to
+    # virtual packages.
+    packages_yaml = _normalize_packages_yaml(configuration.get("packages"))
+
+    # Add externals for libc from compilers on Linux
+    if not using_libc_compatibility():
+        return packages_yaml
+
+    for compiler in all_compilers_in_config(configuration):
+        libc = compiler.default_libc
+        if libc:
+            entry = {"spec": f"{libc} %{compiler.spec}", "prefix": libc.external_path}
+            packages_yaml.setdefault(libc.name, {}).setdefault("externals", []).append(entry)
+    return packages_yaml
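
For each compiler that can report its libc, the function above injects a synthetic packages.yaml external. An editorial sketch of the resulting entry (spec and prefix values illustrative):

    packages_yaml = {}
    entry = {"spec": "glibc@2.35 %gcc@12.3.0", "prefix": "/usr"}
    packages_yaml.setdefault("glibc", {}).setdefault("externals", []).append(entry)
    # -> {"glibc": {"externals": [{"spec": "glibc@2.35 %gcc@12.3.0", "prefix": "/usr"}]}}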
class ErrorHandler:
    def __init__(self, model):
        self.model = model
@@ -688,8 +747,9 @@ def on_model(model):
    raise UnsatisfiableSpecError(msg)


-#: Data class to collect information on a requirement
class RequirementRule(NamedTuple):
+    """Data class to collect information on a requirement"""
+
    pkg_name: str
    policy: str
    requirements: List["spack.spec.Spec"]
@@ -698,6 +758,27 @@ class RequirementRule(NamedTuple):
    message: Optional[str]


+class KnownCompiler(NamedTuple):
+    """Data class to collect information on compilers"""
+
+    spec: "spack.spec.Spec"
+    os: str
+    target: str
+    available: bool
+    compiler_obj: Optional["spack.compiler.Compiler"]
+
+    def _key(self):
+        return self.spec, self.os, self.target
+
+    def __eq__(self, other: object):
+        if not isinstance(other, KnownCompiler):
+            return NotImplemented
+        return self._key() == other._key()
+
+    def __hash__(self):
+        return hash(self._key())
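
Because identity is keyed on (spec, os, target) only, two entries for the same compiler compare equal even when one carries a compiler_obj and the other does not — exactly how duplicates between configured and reused compilers collapse in a set. Editorial sketch (plain strings stand in for Spec objects; the annotations are not enforced at runtime):

    from typing import NamedTuple, Optional

    class KnownCompilerSketch(NamedTuple):
        spec: str
        os: str
        target: str
        available: bool
        compiler_obj: Optional[object]

        def _key(self):
            return self.spec, self.os, self.target

        def __eq__(self, other):
            return isinstance(other, KnownCompilerSketch) and self._key() == other._key()

        def __hash__(self):
            return hash(self._key())

    a = KnownCompilerSketch("gcc@12.3.0", "ubuntu22.04", "x86_64", True, object())
    b = KnownCompilerSketch("gcc@12.3.0", "ubuntu22.04", "x86_64", False, None)
    print(a == b, len({a, b}))  # True 1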
class PyclingoDriver:
    def __init__(self, cores=True):
        """Driver for the Python clingo interface.
@@ -750,10 +831,16 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
        self.control.load(os.path.join(parent_dir, "heuristic.lp"))
        if spack.config.CONFIG.get("concretizer:duplicates:strategy", "none") != "none":
            self.control.load(os.path.join(parent_dir, "heuristic_separate.lp"))
-       self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
        self.control.load(os.path.join(parent_dir, "display.lp"))
        if not setup.concretize_everything:
            self.control.load(os.path.join(parent_dir, "when_possible.lp"))

+       # Binary compatibility is based on libc on Linux, and on the os tag elsewhere
+       if using_libc_compatibility():
+           self.control.load(os.path.join(parent_dir, "libc_compatibility.lp"))
+       else:
+           self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
+
        timer.stop("load")

        # Grounding is the first step in the solve -- it turns our facts
@@ -950,6 +1037,9 @@ def __init__(self, tests: bool = False):
        self.pkgs: Set[str] = set()
        self.explicitly_required_namespaces: Dict[str, str] = {}

+       # list of unique libc specs targeted by compilers (or an educated guess if no compiler)
+       self.libcs: List[spack.spec.Spec] = []
+
    def pkg_version_rules(self, pkg):
        """Output declared versions of a package.
@@ -1065,37 +1155,29 @@ def compiler_facts(self):
        """Facts about available compilers."""

        self.gen.h2("Available compilers")
-       indexed_possible_compilers = list(enumerate(self.possible_compilers))
-       for compiler_id, compiler in indexed_possible_compilers:
+       for compiler_id, compiler in enumerate(self.possible_compilers):
            self.gen.fact(fn.compiler_id(compiler_id))
            self.gen.fact(fn.compiler_name(compiler_id, compiler.spec.name))
            self.gen.fact(fn.compiler_version(compiler_id, compiler.spec.version))

-           if compiler.operating_system:
-               self.gen.fact(fn.compiler_os(compiler_id, compiler.operating_system))
-
-           if compiler.target == "any":
-               compiler.target = None
+           if compiler.os:
+               self.gen.fact(fn.compiler_os(compiler_id, compiler.os))

            if compiler.target is not None:
                self.gen.fact(fn.compiler_target(compiler_id, compiler.target))

-           for flag_type, flags in compiler.flags.items():
-               for flag in flags:
-                   self.gen.fact(fn.compiler_flag(compiler_id, flag_type, flag))
+           if compiler.compiler_obj is not None:
+               c = compiler.compiler_obj
+               for flag_type, flags in c.flags.items():
+                   for flag in flags:
+                       self.gen.fact(fn.compiler_flag(compiler_id, flag_type, flag))

+           if compiler.available:
+               self.gen.fact(fn.compiler_available(compiler_id))
+
+           self.gen.fact(fn.compiler_weight(compiler_id, compiler_id))
            self.gen.newline()

-       # Set compiler defaults, given a list of possible compilers
-       self.gen.h2("Default compiler preferences (CompilerID, Weight)")
-
-       ppk = spack.package_prefs.PackagePrefs("all", "compiler", all=False)
-       matches = sorted(indexed_possible_compilers, key=lambda x: ppk(x[1].spec))
-
-       for weight, (compiler_id, cspec) in enumerate(matches):
-           f = fn.compiler_weight(compiler_id, weight)
-           self.gen.fact(f)
-
    def package_requirement_rules(self, pkg):
        parser = RequirementParser(spack.config.CONFIG)
        self.emit_facts_from_requirement_rules(parser.rules(pkg))
@@ -1309,34 +1391,39 @@ def condition(
        Returns:
            int: id of the condition created by this function
        """
-       named_cond = required_spec.copy()
-       named_cond.name = named_cond.name or name
-       if not named_cond.name:
-           raise ValueError(f"Must provide a name for anonymous condition: '{named_cond}'")
+       name = required_spec.name or name
+       if not name:
+           raise ValueError(f"Must provide a name for anonymous condition: '{required_spec}'")

-       # Check if we can emit the requirements before updating the condition ID counter.
-       # In this way, if a condition can't be emitted but the exception is handled in the caller,
-       # we won't emit partial facts.
-
-       condition_id = next(self._id_counter)
-       self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition(condition_id)))
-       self.gen.fact(fn.condition_reason(condition_id, msg))
-
-       trigger_id = self._get_condition_id(
-           named_cond, cache=self._trigger_cache, body=True, transform=transform_required
-       )
-       self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_trigger(condition_id, trigger_id)))
-
-       if not imposed_spec:
-           return condition_id
-
-       effect_id = self._get_condition_id(
-           imposed_spec, cache=self._effect_cache, body=False, transform=transform_imposed
-       )
-       self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_effect(condition_id, effect_id)))
+       with spec_with_name(required_spec, name):
+           # Check if we can emit the requirements before updating the condition ID counter.
+           # In this way, if a condition can't be emitted but the exception is handled in the
+           # caller, we won't emit partial facts.
+
+           condition_id = next(self._id_counter)
+           self.gen.fact(fn.pkg_fact(required_spec.name, fn.condition(condition_id)))
+           self.gen.fact(fn.condition_reason(condition_id, msg))
+
+           trigger_id = self._get_condition_id(
+               required_spec, cache=self._trigger_cache, body=True, transform=transform_required
+           )
+           self.gen.fact(
+               fn.pkg_fact(required_spec.name, fn.condition_trigger(condition_id, trigger_id))
+           )
+
+           if not imposed_spec:
+               return condition_id
+
+           effect_id = self._get_condition_id(
+               imposed_spec, cache=self._effect_cache, body=False, transform=transform_imposed
+           )
+           self.gen.fact(
+               fn.pkg_fact(required_spec.name, fn.condition_effect(condition_id, effect_id))
+           )

        return condition_id

    def impose(self, condition_id, imposed_spec, node=True, name=None, body=False):
        imposed_constraints = self.spec_clauses(imposed_spec, body=body, required_from=name)
        for pred in imposed_constraints:
@@ -1523,12 +1610,8 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
                requirement_weight += 1

    def external_packages(self):
-       """Facts on external packages, as read from packages.yaml"""
-       # Read packages.yaml and normalize it, so that it
-       # will not contain entries referring to virtual
-       # packages.
-       packages_yaml = spack.config.get("packages")
-       packages_yaml = _normalize_packages_yaml(packages_yaml)
+       """Facts on external packages, from packages.yaml and implicit externals."""
+       packages_yaml = _external_config_with_implicit_externals(spack.config.CONFIG)

        self.gen.h1("External packages")
        for pkg_name, data in packages_yaml.items():
@@ -1579,6 +1662,7 @@ def external_imposition(input_spec, requirements):
            self.gen.newline()

        self.trigger_rules()
+       self.effect_rules()

    def preferred_variants(self, pkg_name):
        """Facts on concretization preferences, as read from packages.yaml"""
@@ -1624,23 +1708,6 @@ def target_preferences(self):
            for i, preferred in enumerate(package_targets):
                self.gen.fact(fn.target_weight(str(preferred.architecture.target), i))

-   def flag_defaults(self):
-       self.gen.h2("Compiler flag defaults")
-
-       # types of flags that can be on specs
-       for flag in spack.spec.FlagMap.valid_compiler_flags():
-           self.gen.fact(fn.flag_type(flag))
-       self.gen.newline()
-
-       # flags from compilers.yaml
-       compilers = all_compilers_in_config()
-       for compiler in compilers:
-           for name, flags in compiler.flags.items():
-               for flag in flags:
-                   self.gen.fact(
-                       fn.compiler_version_flag(compiler.name, compiler.version, name, flag)
-                   )
-
    def spec_clauses(
        self,
        spec: spack.spec.Spec,
@@ -1817,6 +1884,16 @@ def _spec_clauses(
                    if dep.name == "gcc-runtime":
                        continue

+                   # libc is also solved again by clingo, but in this case the compatibility
+                   # is not encoded in the parent node - so we need to emit explicit facts
+                   if "libc" in dspec.virtuals:
+                       for libc in self.libcs:
+                           if libc_is_compatible(libc, dep):
+                               clauses.append(
+                                   fn.attr("compatible_libc", spec.name, libc.name, libc.version)
+                               )
+                       continue
+
                    # We know dependencies are real for concrete specs. For abstract
                    # specs they just mean the dep is somehow in the DAG.
                    for dtype in dt.ALL_FLAGS:
@@ -2046,9 +2123,16 @@ def target_defaults(self, specs):
            candidate_targets.append(ancestor)

        best_targets = {uarch.family.name}
-       for compiler_id, compiler in enumerate(self.possible_compilers):
+       for compiler_id, known_compiler in enumerate(self.possible_compilers):
+           if not known_compiler.available:
+               continue
+
+           compiler = known_compiler.compiler_obj
            # Stub support for cross-compilation, to be expanded later
-           if compiler.target is not None and compiler.target != str(uarch.family):
+           if known_compiler.target is not None and compiler.target not in (
+               str(uarch.family),
+               "any",
+           ):
                self.gen.fact(fn.compiler_supports_target(compiler_id, compiler.target))
                self.gen.newline()
                continue
@@ -2104,58 +2188,6 @@ def virtual_providers(self):
            self.gen.fact(fn.virtual(vspec))
        self.gen.newline()

-   def generate_possible_compilers(self, specs):
-       compilers = all_compilers_in_config()
-
-       # Search for compilers which differs only by aspects that are
-       # not selectable by users using the spec syntax
-       seen, sanitized_list = set(), []
-       for compiler in compilers:
-           key = compiler.spec, compiler.operating_system, compiler.target
-           if key in seen:
-               warnings.warn(
-                   f"duplicate found for {compiler.spec} on "
-                   f"{compiler.operating_system}/{compiler.target}. "
-                   f"Edit your compilers.yaml configuration to remove it."
-               )
-               continue
-           sanitized_list.append(compiler)
-           seen.add(key)
-
-       cspecs = set([c.spec for c in compilers])
-
-       # add compiler specs from the input line to possibilities if we
-       # don't require compilers to exist.
-       strict = spack.concretize.Concretizer().check_for_compiler_existence
-       for s in traverse.traverse_nodes(specs):
-           # we don't need to validate compilers for already-built specs
-           if s.concrete or not s.compiler:
-               continue
-
-           version = s.compiler.versions.concrete
-
-           if not version or any(c.satisfies(s.compiler) for c in cspecs):
-               continue
-
-           # Error when a compiler is not found and strict mode is enabled
-           if strict:
-               raise spack.concretize.UnavailableCompilerVersionError(s.compiler)
-
-           # Make up a compiler matching the input spec. This is for bootstrapping.
-           compiler_cls = spack.compilers.class_for_compiler_name(s.compiler.name)
-           compilers.append(
-               compiler_cls(s.compiler, operating_system=None, target=None, paths=[None] * 4)
-           )
-           self.gen.fact(fn.allow_compiler(s.compiler.name, version))
-
-       return list(
-           sorted(
-               compilers,
-               key=lambda compiler: (compiler.spec.name, compiler.spec.version),
-               reverse=True,
-           )
-       )
-
    def define_version_constraints(self):
        """Define what version_satisfies(...) means in ASP logic."""
        for pkg_name, versions in sorted(self.version_constraints):
@@ -2317,6 +2349,7 @@ def setup(
        node_counter = _create_counter(specs, tests=self.tests)
        self.possible_virtuals = node_counter.possible_virtuals()
        self.pkgs = node_counter.possible_dependencies()
+       self.libcs = sorted(all_libcs())  # type: ignore[type-var]

        # Fail if we already know an unreachable node is requested
        for spec in specs:
@@ -2326,11 +2359,17 @@ def setup(
        if missing_deps:
            raise spack.spec.InvalidDependencyError(spec.name, missing_deps)

-       for node in spack.traverse.traverse_nodes(specs):
+       for node in traverse.traverse_nodes(specs):
            if node.namespace is not None:
                self.explicitly_required_namespaces[node.name] = node.namespace

        self.gen = ProblemInstanceBuilder()
+       compiler_parser = CompilerParser(configuration=spack.config.CONFIG).with_input_specs(specs)
+
+       if using_libc_compatibility():
+           for libc in self.libcs:
+               self.gen.fact(fn.allowed_libc(libc.name, libc.version))

        if not allow_deprecated:
            self.gen.fact(fn.deprecated_versions_not_allowed())
@@ -2349,17 +2388,17 @@ def setup(
        )
        specs = tuple(specs)  # ensure compatible types to add

-       # get possible compilers
-       self.possible_compilers = self.generate_possible_compilers(specs)
-
        self.gen.h1("Reusable concrete specs")
        self.define_concrete_input_specs(specs, self.pkgs)
        if reuse:
            self.gen.fact(fn.optimize_for_reuse())
            for reusable_spec in reuse:
+               compiler_parser.add_compiler_from_concrete_spec(reusable_spec)
                self.register_concrete_spec(reusable_spec, self.pkgs)
        self.concrete_specs()

+       self.possible_compilers = compiler_parser.possible_compilers()
+
        self.gen.h1("Generic statements on possible packages")
        node_counter.possible_packages_facts(self.gen, fn)
@@ -2460,15 +2499,42 @@ def visit(node):
    def define_runtime_constraints(self):
        """Define the constraints to be imposed on the runtimes"""
        recorder = RuntimePropertyRecorder(self)

        for compiler in self.possible_compilers:
-           compiler_with_different_cls_names = {"oneapi": "intel-oneapi-compilers"}
-           compiler_cls_name = compiler_with_different_cls_names.get(compiler.name, compiler.name)
+           compiler_with_different_cls_names = {
+               "oneapi": "intel-oneapi-compilers",
+               "clang": "llvm",
+           }
+           compiler_cls_name = compiler_with_different_cls_names.get(
+               compiler.spec.name, compiler.spec.name
+           )
            try:
                compiler_cls = spack.repo.PATH.get_pkg_class(compiler_cls_name)
+               if hasattr(compiler_cls, "runtime_constraints"):
+                   compiler_cls.runtime_constraints(spec=compiler.spec, pkg=recorder)
            except spack.repo.UnknownPackageError:
                pass
-           if hasattr(compiler_cls, "runtime_constraints"):
-               compiler_cls.runtime_constraints(spec=compiler.spec, pkg=recorder)

+           # Inject libc from available compilers, on Linux
+           if not compiler.available:
+               continue
+
+           current_libc = compiler.compiler_obj.default_libc
+           # If this is a compiler yet to be built (config:install_missing_compilers:true)
+           # infer libc from the Python process
+           if not current_libc and compiler.compiler_obj.cc is None:
+               current_libc = spack.util.libc.libc_from_current_python_process()
+
+           if using_libc_compatibility() and current_libc:
+               recorder("*").depends_on(
+                   "libc", when=f"%{compiler.spec}", type="link", description="Add libc"
+               )
+               recorder("*").depends_on(
+                   str(current_libc),
+                   when=f"%{compiler.spec}",
+                   type="link",
+                   description="Add libc",
+               )

        recorder.consume_facts()
@@ -2840,6 +2906,97 @@ def reject_requirement_constraint(
        return False


+class CompilerParser:
+    """Parses configuration files, and builds a list of possible compilers for the solve."""
+
+    def __init__(self, configuration) -> None:
+        self.compilers: Set[KnownCompiler] = set()
+        for c in all_compilers_in_config(configuration):
+            if using_libc_compatibility() and not c.default_libc:
+                warnings.warn(
+                    f"cannot detect libc from {c.spec}. The compiler will not be used "
+                    f"during concretization."
+                )
+                continue
+
+            target = c.target if c.target != "any" else None
+            candidate = KnownCompiler(
+                spec=c.spec, os=c.operating_system, target=target, available=True, compiler_obj=c
+            )
+            if candidate in self.compilers:
+                warnings.warn(
+                    f"duplicate found for {c.spec} on {c.operating_system}/{c.target}. "
+                    f"Edit your compilers.yaml configuration to remove it."
+                )
+                continue
+
+            self.compilers.add(candidate)
+
+    def with_input_specs(self, input_specs: List["spack.spec.Spec"]) -> "CompilerParser":
+        """Accounts for input specs when building the list of possible compilers.
+
+        Args:
+            input_specs: specs to be concretized
+        """
+        strict = spack.concretize.Concretizer().check_for_compiler_existence
+        default_os = str(spack.platforms.host().default_os)
+        default_target = str(archspec.cpu.host().family)
+        for s in traverse.traverse_nodes(input_specs):
+            # we don't need to validate compilers for already-built specs
+            if s.concrete or not s.compiler:
+                continue
+
+            version = s.compiler.versions.concrete
+
+            if not version or any(item.spec.satisfies(s.compiler) for item in self.compilers):
+                continue
+
+            # Error when a compiler is not found and strict mode is enabled
+            if strict:
+                raise spack.concretize.UnavailableCompilerVersionError(s.compiler)
+
+            # Make up a compiler matching the input spec. This is for bootstrapping.
+            compiler_cls = spack.compilers.class_for_compiler_name(s.compiler.name)
+            compiler_obj = compiler_cls(
+                s.compiler, operating_system=default_os, target=default_target, paths=[None] * 4
+            )
+            self.compilers.add(
+                KnownCompiler(
+                    spec=s.compiler,
+                    os=default_os,
+                    target=default_target,
+                    available=True,
+                    compiler_obj=compiler_obj,
+                )
+            )
+
+        return self
+
+    def add_compiler_from_concrete_spec(self, spec: "spack.spec.Spec") -> None:
+        """Account for compilers that are coming from concrete specs, through reuse.
+
+        Args:
+            spec: concrete spec to be reused
+        """
+        assert spec.concrete, "the spec argument must be concrete"
+        candidate = KnownCompiler(
+            spec=spec.compiler,
+            os=str(spec.architecture.os),
+            target=str(spec.architecture.target.microarchitecture.family),
+            available=False,
+            compiler_obj=None,
+        )
+        self.compilers.add(candidate)
+
+    def possible_compilers(self) -> List[KnownCompiler]:
+        # Here we have to sort two times, first sort by name and ascending version
+        result = sorted(self.compilers, key=lambda x: (x.spec.name, x.spec.version), reverse=True)
+        # Then stable sort to prefer available compilers and account for preferences
+        ppk = spack.package_prefs.PackagePrefs("all", "compiler", all=False)
+        result.sort(key=lambda x: (not x.available, ppk(x.spec)))
+        return result
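
An editorial sketch of the two-pass sort in `possible_compilers`: Python's sort is stable, so sorting by the secondary key first and then by the primary key yields "primary, then secondary" ordering overall:

    items = [("gcc", 12, True), ("gcc", 13, False), ("clang", 17, True)]
    result = sorted(items, key=lambda x: (x[0], x[1]), reverse=True)  # name/version, descending
    result.sort(key=lambda x: not x[2])                               # stable: available first
    print(result)  # [('gcc', 12, True), ('clang', 17, True), ('gcc', 13, False)]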
class RuntimePropertyRecorder:
    """An object of this class is injected in callbacks to compilers, to let them declare
    properties of the runtimes they support and of the runtimes they provide, and to add
@@ -3126,12 +3283,8 @@ def no_flags(self, node, flag_type):
        self._specs[node].compiler_flags[flag_type] = []

    def external_spec_selected(self, node, idx):
-       """This means that the external spec and index idx
-       has been selected for this package.
-       """
-
-       packages_yaml = spack.config.get("packages")
-       packages_yaml = _normalize_packages_yaml(packages_yaml)
+       """This means that the external spec and index idx has been selected for this package."""
+       packages_yaml = _external_config_with_implicit_externals(spack.config.CONFIG)
        spec_info = packages_yaml[node.pkg]["externals"][int(idx)]
        self._specs[node].external_path = spec_info.get("prefix", None)
        self._specs[node].external_modules = spack.spec.Spec._format_module_list(
@@ -3179,7 +3332,9 @@ def reorder_flags(self):
        imposes order afterwards.
        """
        # reverse compilers so we get highest priority compilers that share a spec
-       compilers = dict((c.spec, c) for c in reversed(all_compilers_in_config()))
+       compilers = dict(
+           (c.spec, c) for c in reversed(all_compilers_in_config(spack.config.CONFIG))
+       )
        cmd_specs = dict((s.name, s) for spec in self._command_line_specs for s in spec.traverse())

        for spec in self._specs.values():
@@ -3427,7 +3582,7 @@ def __init__(self):

        # These properties are settable via spack configuration, and overridable
        # by setting them directly as properties.
-       self.reuse = spack.config.get("concretizer:reuse", False)
+       self.reuse = spack.config.get("concretizer:reuse", True)

    @staticmethod
    def _check_input_and_extract_concrete_specs(specs):
@@ -3444,7 +3599,7 @@ def _check_input_and_extract_concrete_specs(specs):
    def _reusable_specs(self, specs):
        reusable_specs = []
        if self.reuse:
-           packages = spack.config.get("packages")
+           packages = _external_config_with_implicit_externals(spack.config.CONFIG)
            # Specs from the local Database
            with spack.store.STORE.db.read_transaction():
                reusable_specs.extend(
@@ -127,10 +127,12 @@ trigger_node(TriggerID, Node, Node) :-
|
||||
trigger_condition_holds(TriggerID, Node),
|
||||
literal(TriggerID).
|
||||
|
||||
% Since we trigger the existence of literal nodes from a condition, we need to construct
|
||||
% the condition_set/2 manually below
|
||||
% Since we trigger the existence of literal nodes from a condition, we need to construct the condition_set/2
|
||||
mentioned_in_literal(Root, Mentioned) :- mentioned_in_literal(TriggerID, Root, Mentioned), solve_literal(TriggerID).
|
||||
condition_set(node(min_dupe_id, Root), node(min_dupe_id, Mentioned)) :- mentioned_in_literal(Root, Mentioned).
|
||||
condition_set(node(min_dupe_id, Root), node(min_dupe_id, Root)) :- mentioned_in_literal(Root, Root).
|
||||
|
||||
1 { condition_set(node(min_dupe_id, Root), node(0..Y-1, Mentioned)) : max_dupes(Mentioned, Y) } 1 :-
|
||||
mentioned_in_literal(Root, Mentioned), Mentioned != Root.
|
||||
|
||||
% Discriminate between "roots" that have been explicitly requested, and roots that are deduced from "virtual roots"
|
||||
explicitly_requested_root(node(min_dupe_id, Package)) :-
|
||||
@@ -138,6 +140,20 @@ explicitly_requested_root(node(min_dupe_id, Package)) :-
|
||||
trigger_and_effect(Package, TriggerID, EffectID),
|
||||
imposed_constraint(EffectID, "root", Package).
|
||||
|
||||
|
||||
% Keep track of which nodes are associated with which root DAG
|
||||
associated_with_root(RootNode, RootNode) :- attr("root", RootNode).
|
||||
|
||||
associated_with_root(RootNode, ChildNode) :-
|
||||
depends_on(ParentNode, ChildNode),
|
||||
associated_with_root(RootNode, ParentNode).
|
||||
|
||||
% We cannot have a node in the root condition set, that is not associated with that root
|
||||
:- attr("root", RootNode),
|
||||
condition_set(RootNode, node(X, Package)),
|
||||
not virtual(Package),
|
||||
not associated_with_root(RootNode, node(X, Package)).
|
||||
|
||||
#defined concretize_everything/0.
|
||||
#defined literal/1.
|
||||
|
||||
@@ -523,6 +539,12 @@ attr("virtual_on_edge", PackageNode, ProviderNode, Virtual)
|
||||
provider(ProviderNode, node(_, Virtual)),
|
||||
not external(PackageNode).
|
||||
|
||||
% If a virtual node is in the answer set, it must be either a virtual root,
|
||||
% or used somewhere
|
||||
:- attr("virtual_node", node(_, Virtual)),
|
||||
not attr("virtual_on_incoming_edges", _, Virtual),
|
||||
not attr("virtual_root", node(_, Virtual)).
|
||||
|
||||
attr("virtual_on_incoming_edges", ProviderNode, Virtual)
|
||||
:- attr("virtual_on_edge", _, ProviderNode, Virtual).
|
||||
|
||||
@@ -885,12 +907,8 @@ error(100, "{0} variant '{1}' cannot have values '{2}' and '{3}' as they come fr
|
||||
Set1 < Set2, % see[1]
|
||||
build(node(ID, Package)).
|
||||
|
||||
% variant_set is an explicitly set variant value. If it's not 'set',
|
||||
% we revert to the default value. If it is set, we force the set value
|
||||
attr("variant_value", PackageNode, Variant, Value)
|
||||
:- attr("node", PackageNode),
|
||||
node_has_variant(PackageNode, Variant),
|
||||
attr("variant_set", PackageNode, Variant, Value).
|
||||
:- attr("variant_set", node(ID, Package), Variant, Value),
|
||||
not attr("variant_value", node(ID, Package), Variant, Value).
|
||||
|
||||
% The rules below allow us to prefer default values for variants
|
||||
% whenever possible. If a variant is set in a spec, or if it is
|
||||
@@ -975,14 +993,13 @@ pkg_fact(Package, variant_single_value("dev_path"))
|
||||
% Platform semantics
|
||||
%-----------------------------------------------------------------------------
|
||||
|
||||
% if no platform is set, fall back to the default
|
||||
error(100, "platform '{0}' is not allowed on the current host", Platform)
|
||||
:- attr("node_platform", _, Platform), not allowed_platform(Platform).
|
||||
% NOTE: Currently we have a single allowed platform per DAG, therefore there is no
|
||||
% need to have additional optimization criteria. If we ever add cross-platform dags,
|
||||
% this needs to be changed.
|
||||
:- 2 { allowed_platform(Platform) }, internal_error("More than one allowed platform detected").
|
||||
|
||||
attr("node_platform", PackageNode, Platform)
|
||||
:- attr("node", PackageNode),
|
||||
not attr("node_platform_set", PackageNode),
|
||||
node_platform_default(Platform).
|
||||
1 { attr("node_platform", PackageNode, Platform) : allowed_platform(Platform) } 1
|
||||
:- attr("node", PackageNode).
|
||||
|
||||
% setting platform on a node is a hard constraint
|
||||
attr("node_platform", PackageNode, Platform)
|
||||
@@ -1006,14 +1023,6 @@ error(100, "Cannot select '{0} os={1}' (operating system '{1}' is not buildable)
|
||||
attr("node_os", node(X, Package), OS),
|
||||
not buildable_os(OS).
|
||||
|
||||
% can't have dependencies on incompatible OS's
|
||||
error(100, "{0} and dependency {1} have incompatible operating systems 'os={2}' and 'os={3}'", Package, Dependency, PackageNodeOS, DependencyOS)
|
||||
:- depends_on(node(X, Package), node(Y, Dependency)),
|
||||
attr("node_os", node(X, Package), PackageNodeOS),
|
||||
attr("node_os", node(Y, Dependency), DependencyOS),
|
||||
not os_compatible(PackageNodeOS, DependencyOS),
|
||||
build(node(X, Package)).
|
||||
|
||||
% give OS choice weights according to os declarations
|
||||
node_os_weight(PackageNode, Weight)
|
||||
:- attr("node", PackageNode),
|
||||
@@ -1026,13 +1035,6 @@ os_compatible(OS, OS) :- os(OS).
|
||||
% Transitive compatibility among operating systems
|
||||
os_compatible(OS1, OS3) :- os_compatible(OS1, OS2), os_compatible(OS2, OS3).
|
||||
|
||||
% We can select only operating systems compatible with the ones
|
||||
% for which we can build software. We need a cardinality constraint
|
||||
% since we might have more than one "buildable_os(OS)" fact.
|
||||
:- not 1 { os_compatible(CurrentOS, ReusedOS) : buildable_os(CurrentOS) },
|
||||
attr("node_os", Package, ReusedOS),
|
||||
internal_error("Reused OS incompatible with build OS").
|
||||
|
||||
% If an OS is set explicitly respect the value
|
||||
attr("node_os", PackageNode, OS) :- attr("node_os_set", PackageNode, OS), attr("node", PackageNode).
|
||||
|
||||
@@ -1080,6 +1082,9 @@ error(100, "{0} compiler '{2}@{3}' incompatible with 'target={1}'", Package, Tar
|
||||
compiler_version(CompilerID, Version),
|
||||
build(node(X, Package)).
|
||||
|
||||
#defined compiler_supports_target/2.
|
||||
#defined compiler_available/1.
|
||||
|
||||
% if a target is set explicitly, respect it
|
||||
attr("node_target", PackageNode, Target)
|
||||
:- attr("node", PackageNode), attr("node_target_set", PackageNode, Target).
|
||||
@@ -1110,7 +1115,7 @@ error(100, "'{0} target={1}' is not compatible with this machine", Package, Targ
|
||||
% Compiler semantics
|
||||
%-----------------------------------------------------------------------------
|
||||
% There must be only one compiler set per built node.
|
||||
{ node_compiler(PackageNode, CompilerID) : compiler_id(CompilerID) } :-
|
||||
{ node_compiler(PackageNode, CompilerID) : compiler_id(CompilerID), compiler_available(CompilerID) } :-
|
||||
attr("node", PackageNode),
|
||||
build(PackageNode).
|
||||
|
||||
@@ -1127,6 +1132,7 @@ attr("node_compiler_version", PackageNode, CompilerName, CompilerVersion)
:- node_compiler(PackageNode, CompilerID),
compiler_name(CompilerID, CompilerName),
compiler_version(CompilerID, CompilerVersion),
compiler_available(CompilerID),
build(PackageNode).

attr("node_compiler", PackageNode, CompilerName)
@@ -1189,7 +1195,6 @@ error(100, "{0} compiler '%{1}@{2}' incompatible with 'os={3}'", Package, Compil
compiler_version(CompilerID, Version),
compiler_os(CompilerID, CompilerOS),
not os_compatible(CompilerOS, OS),
not allow_compiler(Compiler, Version),
build(node(X, Package)).

% If a package and one of its dependencies don't have the
@@ -1210,7 +1215,6 @@ compiler_mismatch_required(PackageNode, DependencyNode)
not compiler_match(PackageNode, DependencyNode).

#defined compiler_os/3.
#defined allow_compiler/2.

% compilers weighted by preference according to packages.yaml
node_compiler_weight(node(ID, Package), Weight)

37  lib/spack/spack/solver/libc_compatibility.lp  Normal file
@@ -0,0 +1,37 @@
% Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

%=============================================================================
% Libc compatibility rules for reusing solves.
%
% These rules are used on Linux
%=============================================================================

% A package cannot be reused if the libc is not compatible with it
:- provider(node(X, LibcPackage), node(0, "libc")),
attr("version", node(X, LibcPackage), LibcVersion),
attr("hash", node(R, ReusedPackage), Hash),
% Libc packages can be reused without the "compatible_libc" attribute
ReusedPackage != LibcPackage,
not attr("compatible_libc", node(R, ReusedPackage), LibcPackage, LibcVersion).

% Check whether the DAG has any built package
has_built_packages() :- build(X), not external(X).

% A libc is needed in the DAG
:- has_built_packages(), not provider(_, node(0, "libc")).

% The libc must be chosen among available ones
:- has_built_packages(),
provider(node(X, LibcPackage), node(0, "libc")),
attr("node", node(X, LibcPackage)),
attr("version", node(X, LibcPackage), LibcVersion),
not allowed_libc(LibcPackage, LibcVersion).

% A built node must depend on libc
:- build(PackageNode),
provider(LibcNode, node(0, "libc")),
not external(PackageNode),
not depends_on(PackageNode, LibcNode).
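A hedged Python sketch of the compatibility idea these rules encode: a binary is reusable when the host libc is the same family and at least as new as the libc it was built against (names and version handling here are illustrative, not Spack's actual API):

def compatible_libc(host_libc, built_against):
    """Return True if a binary linked against `built_against` can run on `host_libc`.

    glibc is backward compatible: newer hosts run older binaries, not vice versa.
    """
    host_name, host_version = host_libc
    built_name, built_version = built_against
    if host_name != built_name:
        return False
    # compare dotted versions numerically, e.g. (2, 35) >= (2, 28)
    to_tuple = lambda v: tuple(int(x) for x in v.split("."))
    return to_tuple(host_version) >= to_tuple(built_version)

assert compatible_libc(("glibc", "2.35"), ("glibc", "2.28"))
assert not compatible_libc(("glibc", "2.28"), ("glibc", "2.35"))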
@@ -7,21 +7,26 @@
% OS compatibility rules for reusing solves.
% os_compatible(RecentOS, OlderOS)
% OlderOS binaries can be used on RecentOS
%
% These rules are used on every platform except Linux
%=============================================================================

% macOS
os_compatible("sonoma", "ventura").
os_compatible("ventura", "monterey").
os_compatible("monterey", "bigsur").
os_compatible("bigsur", "catalina").

% Ubuntu
os_compatible("ubuntu22.04", "ubuntu21.10").
os_compatible("ubuntu21.10", "ubuntu21.04").
os_compatible("ubuntu21.04", "ubuntu20.10").
os_compatible("ubuntu20.10", "ubuntu20.04").
os_compatible("ubuntu20.04", "ubuntu19.10").
os_compatible("ubuntu19.10", "ubuntu19.04").
os_compatible("ubuntu19.04", "ubuntu18.10").
os_compatible("ubuntu18.10", "ubuntu18.04").
% can't have dependencies on incompatible OS's
error(100, "{0} and dependency {1} have incompatible operating systems 'os={2}' and 'os={3}'", Package, Dependency, PackageNodeOS, DependencyOS)
:- depends_on(node(X, Package), node(Y, Dependency)),
attr("node_os", node(X, Package), PackageNodeOS),
attr("node_os", node(Y, Dependency), DependencyOS),
not os_compatible(PackageNodeOS, DependencyOS),
build(node(X, Package)).

% EL8
os_compatible("rhel8", "rocky8").
% We can select only operating systems compatible with the ones
% for which we can build software. We need a cardinality constraint
% since we might have more than one "buildable_os(OS)" fact.
:- not 1 { os_compatible(CurrentOS, ReusedOS) : buildable_os(CurrentOS) },
attr("node_os", Package, ReusedOS).
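A small sketch of the reuse guard that the cardinality constraint expresses: a reused spec is acceptable only if its OS is compatible with at least one OS we can build for. The facts below are illustrative:

# hedged sketch; reflexive pairs stand in for os_compatible(OS, OS) :- os(OS).
compat = {("ubuntu22.04", "ubuntu20.04"), ("ubuntu22.04", "ubuntu22.04")}
buildable = ["ubuntu22.04"]

def reusable(reused_os):
    # mirrors: not 1 { os_compatible(CurrentOS, ReusedOS) : buildable_os(CurrentOS) }
    return any((b, reused_os) in compat for b in buildable)

assert reusable("ubuntu20.04")
assert not reusable("rhel8")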
@@ -51,7 +51,6 @@
import collections
import collections.abc
import enum
import io
import itertools
import os
import pathlib
@@ -59,7 +58,7 @@
import re
import socket
import warnings
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union
from typing import Any, Callable, Dict, List, Match, Optional, Set, Tuple, Union

import llnl.path
import llnl.string
@@ -121,36 +120,44 @@
"SpecDeprecatedError",
]


SPEC_FORMAT_RE = re.compile(
r"(?:" # this is one big or, with matches ordered by priority
# OPTION 1: escaped character (needs to be first to catch opening \{)
# Note that an unterminated \ at the end of a string is left untouched
r"(?:\\(.))"
r"|" # or
# OPTION 2: an actual format string
r"{" # non-escaped open brace {
r"([%@/]|arch=)?" # optional sigil (to print sigil in color)
r"(?:\^([^}\.]+)\.)?" # optional ^depname. (to get attr from dependency)
# after the sigil or depname, we can have a hash expression or another attribute
r"(?:" # one of
r"(hash\b)(?:\:(\d+))?" # hash followed by :<optional length>
r"|" # or
r"([^}]*)" # another attribute to format
r")" # end one of
r"(})?" # finish format string with non-escaped close brace }, or missing if not present
r"|"
# OPTION 3: mismatched close brace (option 2 would consume a matched open brace)
r"(})" # brace
r")",
re.IGNORECASE,
)
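For illustration, a small driver that runs the pattern above over a sample format string and prints the captured groups; the group layout matches the tuple unpacked in format_attribute further down:

for m in SPEC_FORMAT_RE.finditer(r"{name}-{version}-{/hash:7}\{literal\}"):
    esc, sig, dep, hash_, hash_len, attribute, close, unmatched = m.groups()
    # {name} / {version} take the attribute branch; {/hash:7} takes the hash
    # branch with sigil "/" and length "7"; \{ and \} take the escape branch
    print(f"esc={esc} sig={sig} hash={hash_} len={hash_len} attr={attribute}")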

#: Valid pattern for an identifier in Spack

IDENTIFIER_RE = r"\w[\w-]*"

# Coloring of specs when using color output. Fields are printed with
# different colors to enhance readability.
# See llnl.util.tty.color for descriptions of the color codes.
COMPILER_COLOR = "@g" #: color for highlighting compilers
VERSION_COLOR = "@c" #: color for highlighting versions
ARCHITECTURE_COLOR = "@m" #: color for highlighting architectures
ENABLED_VARIANT_COLOR = "@B" #: color for highlighting enabled variants
DISABLED_VARIANT_COLOR = "r" #: color for highlighting disabled variants
DEPENDENCY_COLOR = "@." #: color for highlighting dependencies
VARIANT_COLOR = "@B" #: color for highlighting variants
HASH_COLOR = "@K" #: color for highlighting package hashes

#: This map determines the coloring of specs when using color output.
#: We make the fields different colors to enhance readability.
#: See llnl.util.tty.color for descriptions of the color codes.
COLOR_FORMATS = {
"%": COMPILER_COLOR,
"@": VERSION_COLOR,
"=": ARCHITECTURE_COLOR,
"+": ENABLED_VARIANT_COLOR,
"~": DISABLED_VARIANT_COLOR,
"^": DEPENDENCY_COLOR,
"#": HASH_COLOR,
}

#: Regex used for splitting by spec field separators.
#: These need to be escaped to avoid metacharacters in
#: ``COLOR_FORMATS.keys()``.
_SEPARATORS = "[\\%s]" % "\\".join(COLOR_FORMATS.keys())

#: Default format for Spec.format(). This format can be round-tripped, so that:
#: Spec(Spec("string").format()) == Spec("string")
DEFAULT_FORMAT = (
@@ -193,26 +200,7 @@ class InstallStatus(enum.Enum):
missing = "@r{[-]} "


def colorize_spec(spec):
"""Returns a spec colorized according to the colors specified in
COLOR_FORMATS."""

class insert_color:
def __init__(self):
self.last = None

def __call__(self, match):
# ignore compiler versions (color same as compiler)
sep = match.group(0)
if self.last == "%" and sep == "@":
return clr.cescape(sep)
self.last = sep

return "%s%s" % (COLOR_FORMATS[sep], clr.cescape(sep))

return clr.colorize(re.sub(_SEPARATORS, insert_color(), str(spec)) + "@.")


# regexes used in spec formatting
OLD_STYLE_FMT_RE = re.compile(r"\${[A-Z]+}")

@@ -911,6 +899,9 @@ def flags():
yield flags

def __str__(self):
if not self:
return ""

sorted_items = sorted((k, v) for k, v in self.items() if v)

result = ""
@@ -4292,10 +4283,7 @@ def deps():

yield deps

def colorized(self):
return colorize_spec(self)

def format(self, format_string=DEFAULT_FORMAT, **kwargs):
def format(self, format_string: str = DEFAULT_FORMAT, color: Optional[bool] = False) -> str:
r"""Prints out particular pieces of a spec, depending on what is
in the format string.

@@ -4358,79 +4346,65 @@ def format(self, format_string=DEFAULT_FORMAT, **kwargs):
literal ``\`` character.

Args:
format_string (str): string containing the format to be expanded

Keyword Args:
color (bool): True if returned string is colored
transform (dict): maps full-string formats to a callable \
that accepts a string and returns another one

format_string: string containing the format to be expanded
color: True for colorized result; False for no color; None for auto color.
"""
ensure_modern_format_string(format_string)
color = kwargs.get("color", False)
transform = kwargs.get("transform", {})

out = io.StringIO()
def safe_color(sigil: str, string: str, color_fmt: Optional[str]) -> str:
# avoid colorizing if there is no color or the string is empty
if (color is False) or not color_fmt or not string:
return sigil + string
# escape and add the sigil here to avoid multiple concatenations
if sigil == "@":
sigil = "@@"
return clr.colorize(f"{color_fmt}{sigil}{clr.cescape(string)}@.", color=color)

def write(s, c=None):
f = clr.cescape(s)
if c is not None:
f = COLOR_FORMATS[c] + f + "@."
clr.cwrite(f, stream=out, color=color)
def format_attribute(match_object: Match) -> str:
(esc, sig, dep, hash, hash_len, attribute, close_brace, unmatched_close_brace) = (
match_object.groups()
)
if esc:
return esc
elif unmatched_close_brace:
raise SpecFormatStringError(f"Unmatched close brace: '{format_string}'")
elif not close_brace:
raise SpecFormatStringError(f"Missing close brace: '{format_string}'")

def write_attribute(spec, attribute, color):
attribute = attribute.lower()
current = self if dep is None else self[dep]

sig = ""
if attribute.startswith(("@", "%", "/")):
# color sigils that are inside braces
sig = attribute[0]
attribute = attribute[1:]
elif attribute.startswith("arch="):
sig = " arch=" # include space as separator
attribute = attribute[5:]

current = spec
if attribute.startswith("^"):
attribute = attribute[1:]
dep, attribute = attribute.split(".", 1)
current = self[dep]
# Hash attributes can return early.
# NOTE: we currently treat abstract_hash like an attribute and ignore
# any length associated with it. We may want to change that.
if hash:
if sig and sig != "/":
raise SpecFormatSigilError(sig, "DAG hashes", hash)
try:
length = int(hash_len) if hash_len else None
except ValueError:
raise SpecFormatStringError(f"Invalid hash length: '{hash_len}'")
return safe_color(sig or "", current.dag_hash(length), HASH_COLOR)

if attribute == "":
raise SpecFormatStringError("Format string attributes must be non-empty")

attribute = attribute.lower()
parts = attribute.split(".")
assert parts

# check that the sigil is valid for the attribute.
if sig == "@" and parts[-1] not in ("versions", "version"):
if not sig:
sig = ""
elif sig == "@" and parts[-1] not in ("versions", "version"):
raise SpecFormatSigilError(sig, "versions", attribute)
elif sig == "%" and attribute not in ("compiler", "compiler.name"):
raise SpecFormatSigilError(sig, "compilers", attribute)
elif sig == "/" and not re.match(r"(abstract_)?hash(:\d+)?$", attribute):
elif sig == "/" and attribute != "abstract_hash":
raise SpecFormatSigilError(sig, "DAG hashes", attribute)
elif sig == " arch=" and attribute not in ("architecture", "arch"):
raise SpecFormatSigilError(sig, "the architecture", attribute)

# find the morph function for our attribute
morph = transform.get(attribute, lambda s, x: x)

# Special cases for non-spec attributes and hashes.
# These must be the only non-dep component of the format attribute
if attribute == "spack_root":
write(morph(spec, spack.paths.spack_root))
return
elif attribute == "spack_install":
write(morph(spec, spack.store.STORE.layout.root))
return
elif re.match(r"hash(:\d)?", attribute):
col = "#"
if ":" in attribute:
_, length = attribute.split(":")
write(sig + morph(spec, current.dag_hash(int(length))), col)
else:
write(sig + morph(spec, current.dag_hash()), col)
return
elif sig == "arch=":
if attribute not in ("architecture", "arch"):
raise SpecFormatSigilError(sig, "the architecture", attribute)
sig = " arch=" # include space as separator

# Iterate over components using getattr to get next element
for idx, part in enumerate(parts):
@@ -4439,7 +4413,7 @@ def write_attribute(spec, attribute, color):
if part.startswith("_"):
raise SpecFormatStringError("Attempted to format private attribute")
else:
if isinstance(current, vt.VariantMap):
if part == "variants" and isinstance(current, vt.VariantMap):
# subscript instead of getattr for variant names
current = current[part]
else:
@@ -4463,62 +4437,31 @@ def write_attribute(spec, attribute, color):
raise SpecFormatStringError(m)
if isinstance(current, vn.VersionList):
if current == vn.any_version:
# We don't print empty version lists
return
# don't print empty version lists
return ""

if callable(current):
raise SpecFormatStringError("Attempted to format callable object")

if current is None:
# We're not printing anything
return
# not printing anything
return ""

# Set color codes for various attributes
col = None
color = None
if "variants" in parts:
col = "+"
color = VARIANT_COLOR
elif "architecture" in parts:
col = "="
color = ARCHITECTURE_COLOR
elif "compiler" in parts or "compiler_flags" in parts:
col = "%"
color = COMPILER_COLOR
elif "version" in parts or "versions" in parts:
col = "@"
color = VERSION_COLOR

# Finally, write the output
write(sig + morph(spec, str(current)), col)
# return colored output
return safe_color(sig, str(current), color)

attribute = ""
in_attribute = False
escape = False

for c in format_string:
if escape:
out.write(c)
escape = False
elif c == "\\":
escape = True
elif in_attribute:
if c == "}":
write_attribute(self, attribute, color)
attribute = ""
in_attribute = False
else:
attribute += c
else:
if c == "}":
raise SpecFormatStringError(
"Encountered closing } before opening { in %s" % format_string
)
elif c == "{":
in_attribute = True
else:
out.write(c)
if in_attribute:
raise SpecFormatStringError(
"Format string terminated while reading attribute. Missing terminating }."
)

formatted_spec = out.getvalue()
return formatted_spec.strip()
return SPEC_FORMAT_RE.sub(format_attribute, format_string).strip()

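The single-pass rewrite on the last line relies on re.sub accepting a callable replacement; a minimal standalone demonstration of that mechanism, independent of Spack:

import re

def shout(match: "re.Match") -> str:
    # replacement computed per match, like format_attribute above
    return match.group(1).upper()

assert re.sub(r"{(\w+)}", shout, "{name}-{version}") == "NAME-VERSION"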
def cformat(self, *args, **kwargs):
"""Same as format, but color defaults to auto instead of False."""
@@ -4526,6 +4469,16 @@ def cformat(self, *args, **kwargs):
kwargs.setdefault("color", None)
return self.format(*args, **kwargs)

@property
def spack_root(self):
"""Special field for using ``{spack_root}`` in Spec.format()."""
return spack.paths.spack_root

@property
def spack_install(self):
"""Special field for using ``{spack_install}`` in Spec.format()."""
return spack.store.STORE.layout.root

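A usage sketch of the new signature (the spec string is hypothetical; the point is that color is now an explicit keyword instead of **kwargs):

s = Spec("zlib@1.3 %gcc@10.2.1")           # hypothetical input spec
s.format("{name}-{version}")               # -> "zlib-1.3", no color
s.format("{name}-{version}", color=None)   # auto-detect color, like cformat()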
def format_path(
# self, format_string: str, _path_ctor: Optional[pathlib.PurePath] = None
self,
@@ -4551,18 +4504,27 @@ def format_path(

path_ctor = _path_ctor or pathlib.PurePath
format_string_as_path = path_ctor(format_string)
if format_string_as_path.is_absolute():
if format_string_as_path.is_absolute() or (
# Paths that begin with a single "\" on windows are relative, but we still
# want to preserve the initial "\\" to be consistent with PureWindowsPath.
# Ensure that this '\' is not passed to polite_filename() so it's not converted to '_'
(os.name == "nt" or path_ctor == pathlib.PureWindowsPath)
and format_string_as_path.parts[0] == "\\"
):
output_path_components = [format_string_as_path.parts[0]]
input_path_components = list(format_string_as_path.parts[1:])
else:
output_path_components = []
input_path_components = list(format_string_as_path.parts)

output_path_components += [
fs.polite_filename(self.format(x)) for x in input_path_components
fs.polite_filename(self.format(part)) for part in input_path_components
]
return str(path_ctor(*output_path_components))

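The new branch keys off PureWindowsPath.parts; a quick standalone check of the behavior it relies on in CPython's pathlib:

import pathlib

p = pathlib.PureWindowsPath(r"\spack\opt")
assert p.parts[0] == "\\"    # rooted on the current drive...
assert not p.is_absolute()   # ...but not absolute without a drive letter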
def __str__(self):
if not self._dependencies:
return self.format()
root_str = [self.format()]
sorted_dependencies = sorted(
self.traverse(root=False), key=lambda x: (x.name, x.abstract_hash)
@@ -390,11 +390,11 @@ def test_built_spec_cache(mirror_dir):
assert any([r["spec"] == s for r in results])


def fake_dag_hash(spec):
def fake_dag_hash(spec, length=None):
# Generate an arbitrary hash that is intended to be different than
# whatever a Spec reported before (to test actions that trigger when
# the hash changes)
return "tal4c7h4z0gqmixb1eqa92mjoybxn5l6"
return "tal4c7h4z0gqmixb1eqa92mjoybxn5l6"[:length]

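The new length parameter works because slicing with None returns the whole string:

h = "tal4c7h4z0gqmixb1eqa92mjoybxn5l6"
assert h[:None] == h        # no length requested: full hash
assert h[:7] == "tal4c7h"   # truncated hash, as used by {hash:7}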
@pytest.mark.usefixtures(
@@ -63,7 +63,8 @@ def build_environment(working_env):
os.environ["SPACK_LINKER_ARG"] = "-Wl,"
os.environ["SPACK_DTAGS_TO_ADD"] = "--disable-new-dtags"
os.environ["SPACK_DTAGS_TO_STRIP"] = "--enable-new-dtags"
os.environ["SPACK_SYSTEM_DIRS"] = "/usr/include /usr/lib"
os.environ["SPACK_SYSTEM_DIRS"] = "/usr/include|/usr/lib"
os.environ["SPACK_MANAGED_DIRS"] = f"{prefix}/opt/spack"
os.environ["SPACK_TARGET_ARGS"] = ""

if "SPACK_DEPENDENCIES" in os.environ:
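A sketch of why the separator changed (assumption: the compiler wrapper treats the value as a shell case pattern, in line with the SYSTEM_DIR_CASE_ENTRY name used below, so "|" both delimits entries and survives paths containing spaces):

old = "/usr/include /usr/lib".split(" ")  # breaks for any path containing a space
new = "/usr/include|/usr/lib".split("|")  # '|' is safe and doubles as a shell
                                          # case-pattern alternation:
                                          #   case "$d" in /usr/include|/usr/lib) ;; esac
assert old == new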
@@ -15,7 +15,7 @@
import spack.config
import spack.spec
from spack.paths import build_env_path
from spack.util.environment import SYSTEM_DIRS, set_env
from spack.util.environment import SYSTEM_DIR_CASE_ENTRY, set_env
from spack.util.executable import Executable, ProcessError

#
@@ -127,7 +127,7 @@
spack_cflags = ["-Wall"]
spack_cxxflags = ["-Werror"]
spack_fflags = ["-w"]
spack_ldflags = ["-L", "foo"]
spack_ldflags = ["-Wl,--gc-sections", "-L", "foo"]
spack_ldlibs = ["-lfoo"]

lheaderpad = ["-Wl,-headerpad_max_install_names"]
@@ -159,7 +159,8 @@ def wrapper_environment(working_env):
SPACK_DEBUG_LOG_ID="foo-hashabc",
SPACK_COMPILER_SPEC="gcc@4.4.7",
SPACK_SHORT_SPEC="foo@1.2 arch=linux-rhel6-x86_64 /hashabc",
SPACK_SYSTEM_DIRS=":".join(SYSTEM_DIRS),
SPACK_SYSTEM_DIRS=SYSTEM_DIR_CASE_ENTRY,
SPACK_MANAGED_DIRS="/path/to/spack-1/opt/spack/*|/path/to/spack-2/opt/spack/*",
SPACK_CC_RPATH_ARG="-Wl,-rpath,",
SPACK_CXX_RPATH_ARG="-Wl,-rpath,",
SPACK_F77_RPATH_ARG="-Wl,-rpath,",
@@ -278,7 +279,6 @@ def test_ld_flags(wrapper_environment, wrapper_flags):
test_args,
["ld"]
+ test_include_paths
+ [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
+ test_library_paths
+ ["--disable-new-dtags"]
+ test_rpaths
@@ -306,13 +306,14 @@ def test_cc_flags(wrapper_environment, wrapper_flags):
[real_cc]
+ target_args
+ test_include_paths
+ [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
+ ["-Lfoo"]
+ test_library_paths
+ ["-Wl,--disable-new-dtags"]
+ test_wl_rpaths
+ test_args_without_paths
+ spack_cppflags
+ spack_cflags
+ ["-Wl,--gc-sections"]
+ spack_ldlibs,
)

@@ -324,12 +325,13 @@ def test_cxx_flags(wrapper_environment, wrapper_flags):
[real_cc]
+ target_args
+ test_include_paths
+ [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
+ ["-Lfoo"]
+ test_library_paths
+ ["-Wl,--disable-new-dtags"]
+ test_wl_rpaths
+ test_args_without_paths
+ spack_cppflags
+ ["-Wl,--gc-sections"]
+ spack_ldlibs,
)

@@ -341,13 +343,14 @@ def test_fc_flags(wrapper_environment, wrapper_flags):
[real_cc]
+ target_args
+ test_include_paths
+ [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
+ ["-Lfoo"]
+ test_library_paths
+ ["-Wl,--disable-new-dtags"]
+ test_wl_rpaths
+ test_args_without_paths
+ spack_fflags
+ spack_cppflags
+ ["-Wl,--gc-sections"]
+ spack_ldlibs,
)

@@ -907,3 +910,108 @@ def test_linker_strips_loopopt(wrapper_environment, wrapper_flags):
result = cc(*(test_args + ["-loopopt=0", "-c", "x.c"]), output=str)
result = result.strip().split("\n")
assert "-loopopt=0" in result


def test_spack_managed_dirs_are_prioritized(wrapper_environment):
# We have two different stores with 5 packages divided over them
pkg1 = "/path/to/spack-1/opt/spack/linux-ubuntu22.04-zen2/gcc-13.2.0/pkg-1.0-abcdef"
pkg2 = "/path/to/spack-1/opt/spack/linux-ubuntu22.04-zen2/gcc-13.2.0/pkg-2.0-abcdef"
pkg3 = "/path/to/spack-2/opt/spack/linux-ubuntu22.04-zen2/gcc-13.2.0/pkg-3.0-abcdef"
pkg4 = "/path/to/spack-2/opt/spack/linux-ubuntu22.04-zen2/gcc-13.2.0/pkg-4.0-abcdef"
pkg5 = "/path/to/spack-2/opt/spack/linux-ubuntu22.04-zen2/gcc-13.2.0/pkg-5.0-abcdef"

variables = {
# cppflags, ldflags from the command line, config or package.py take highest priority
"SPACK_CPPFLAGS": f"-I/usr/local/include -I/external-1/include -I{pkg1}/include",
"SPACK_LDFLAGS": f"-L/usr/local/lib -L/external-1/lib -L{pkg1}/lib "
f"-Wl,-rpath,/usr/local/lib -Wl,-rpath,/external-1/lib -Wl,-rpath,{pkg1}/lib",
# automatic -L, -Wl,-rpath, -I flags from dependencies -- on the spack side they are
# already partitioned into "spack owned prefixes" and "non-spack owned prefixes"
"SPACK_STORE_LINK_DIRS": f"{pkg4}/lib:{pkg5}/lib",
"SPACK_STORE_RPATH_DIRS": f"{pkg4}/lib:{pkg5}/lib",
"SPACK_STORE_INCLUDE_DIRS": f"{pkg4}/include:{pkg5}/include",
"SPACK_LINK_DIRS": "/external-3/lib:/external-4/lib",
"SPACK_RPATH_DIRS": "/external-3/lib:/external-4/lib",
"SPACK_INCLUDE_DIRS": "/external-3/include:/external-4/include",
}

with set_env(SPACK_TEST_COMMAND="dump-args", **variables):
effective_call = (
cc(
# system paths
"-I/usr/include",
"-L/usr/lib",
"-Wl,-rpath,/usr/lib",
# some other externals
"-I/external-2/include",
"-L/external-2/lib",
"-Wl,-rpath,/external-2/lib",
# relative paths are considered "spack managed" since they are in the stage dir
"-I..",
"-L..",
"-Wl,-rpath,..", # pathological but simpler for the test.
# spack store paths
f"-I{pkg2}/include",
f"-I{pkg3}/include",
f"-L{pkg2}/lib",
f"-L{pkg3}/lib",
f"-Wl,-rpath,{pkg2}/lib",
f"-Wl,-rpath,{pkg3}/lib",
"hello.c",
"-o",
"hello",
output=str,
)
.strip()
.split("\n")
)

dash_I = [flag[2:] for flag in effective_call if flag.startswith("-I")]
dash_L = [flag[2:] for flag in effective_call if flag.startswith("-L")]
dash_Wl_rpath = [flag[11:] for flag in effective_call if flag.startswith("-Wl,-rpath")]

assert dash_I == [
# spack owned dirs from SPACK_*FLAGS
f"{pkg1}/include",
# spack owned dirs from command line & automatic flags for deps (in that order)
"..",
f"{pkg2}/include", # from command line
f"{pkg3}/include", # from command line
f"{pkg4}/include", # from SPACK_STORE_INCLUDE_DIRS
f"{pkg5}/include", # from SPACK_STORE_INCLUDE_DIRS
# non-system dirs from SPACK_*FLAGS
"/external-1/include",
# non-system dirs from command line & automatic flags for deps (in that order)
"/external-2/include", # from command line
"/external-3/include", # from SPACK_INCLUDE_DIRS
"/external-4/include", # from SPACK_INCLUDE_DIRS
# system dirs from SPACK_*FLAGS
"/usr/local/include",
# system dirs from command line
"/usr/include",
]

assert (
dash_L
== dash_Wl_rpath
== [
# spack owned dirs from SPACK_*FLAGS
f"{pkg1}/lib",
# spack owned dirs from command line & automatic flags for deps (in that order)
"..",
f"{pkg2}/lib", # from command line
f"{pkg3}/lib", # from command line
f"{pkg4}/lib", # from SPACK_STORE_LINK_DIRS
f"{pkg5}/lib", # from SPACK_STORE_LINK_DIRS
# non-system dirs from SPACK_*FLAGS
"/external-1/lib",
# non-system dirs from command line & automatic flags for deps (in that order)
"/external-2/lib", # from command line
"/external-3/lib", # from SPACK_LINK_DIRS
"/external-4/lib", # from SPACK_LINK_DIRS
# system dirs from SPACK_*FLAGS
"/usr/local/lib",
# system dirs from command line
"/usr/lib",
]
)
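A hedged sketch of the reordering policy the test asserts: spack-owned store paths first, then other non-system paths, then system paths. The categorization rule here is simplified; Spack's real matching uses the case patterns discussed above:

SYSTEM = ("/usr/include", "/usr/lib", "/usr/local/include", "/usr/local/lib")

def reorder(dirs, spack_store_prefixes):
    # partition search directories, then emit highest to lowest priority
    store = [d for d in dirs if d.startswith(spack_store_prefixes)]
    system = [d for d in dirs if d in SYSTEM and d not in store]
    other = [d for d in dirs if d not in store and d not in system]
    return store + other + system

print(reorder(
    ["/usr/include", "/external-2/include", "/path/to/spack-1/opt/spack/pkg/include"],
    ("/path/to/spack-1/opt/spack",),
))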
@@ -169,6 +169,25 @@ def test_update_key_index(
assert "index.json" in key_dir_list


def test_buildcache_autopush(tmp_path, install_mockery, mock_fetch):
"""Test buildcache with autopush"""
mirror_dir = tmp_path / "mirror"
mirror_autopush_dir = tmp_path / "mirror_autopush"

mirror("add", "--unsigned", "mirror", mirror_dir.as_uri())
mirror("add", "--autopush", "--unsigned", "mirror-autopush", mirror_autopush_dir.as_uri())

s = Spec("libdwarf").concretized()

# Install and generate build cache index
s.package.do_install()

metadata_file = spack.binary_distribution.tarball_name(s, ".spec.json")

assert not (mirror_dir / "build_cache" / metadata_file).exists()
assert (mirror_autopush_dir / "build_cache" / metadata_file).exists()


def test_buildcache_sync(
mutable_mock_env_path,
install_mockery_mutable_config,
@@ -427,3 +446,10 @@ def test_push_and_install_with_mirror_marked_unsigned_does_not_require_extra_fla

spec.package.do_uninstall(force=True)
spec.package.do_install(**kwargs)


def test_skip_no_redistribute(mock_packages, config):
specs = list(Spec("no-redistribute-dependent").concretized().traverse())
filtered = spack.cmd.buildcache._skip_no_redistribute_for_public(specs)
assert not any(s.name == "no-redistribute" for s in filtered)
assert any(s.name == "no-redistribute-dependent" for s in filtered)
@@ -117,13 +117,13 @@ def test_specs_staging(config, tmpdir):
with repo.use_repositories(builder.root):
spec_a = Spec("a").concretized()

spec_a_label = ci._spec_deps_key(spec_a)
spec_b_label = ci._spec_deps_key(spec_a["b"])
spec_c_label = ci._spec_deps_key(spec_a["c"])
spec_d_label = ci._spec_deps_key(spec_a["d"])
spec_e_label = ci._spec_deps_key(spec_a["e"])
spec_f_label = ci._spec_deps_key(spec_a["f"])
spec_g_label = ci._spec_deps_key(spec_a["g"])
spec_a_label = ci._spec_ci_label(spec_a)
spec_b_label = ci._spec_ci_label(spec_a["b"])
spec_c_label = ci._spec_ci_label(spec_a["c"])
spec_d_label = ci._spec_ci_label(spec_a["d"])
spec_e_label = ci._spec_ci_label(spec_a["e"])
spec_f_label = ci._spec_ci_label(spec_a["f"])
spec_g_label = ci._spec_ci_label(spec_a["g"])

spec_labels, dependencies, stages = ci.stage_spec_jobs([spec_a])

@@ -123,17 +123,18 @@ def test_root_and_dep_match_returns_root(mock_packages, mutable_mock_env_path):


@pytest.mark.parametrize(
"arg,config", [("--reuse", True), ("--fresh", False), ("--reuse-deps", "dependencies")]
"arg,conf", [("--reuse", True), ("--fresh", False), ("--reuse-deps", "dependencies")]
)
def test_concretizer_arguments(mutable_config, mock_packages, arg, config):
def test_concretizer_arguments(mutable_config, mock_packages, arg, conf):
"""Ensure that ConfigSetAction is doing the right thing."""
spec = spack.main.SpackCommand("spec")

assert spack.config.get("concretizer:reuse", None) is None
assert spack.config.get("concretizer:reuse", None, scope="command_line") is None

spec(arg, "zlib")

assert spack.config.get("concretizer:reuse", None) == config
assert spack.config.get("concretizer:reuse", None) == conf
assert spack.config.get("concretizer:reuse", None, scope="command_line") == conf


def test_use_buildcache_type():
@@ -175,7 +175,9 @@ def test_compiler_find_mixed_suffixes(
assert "clang@11.0.0" in output
assert "gcc@8.4.0" in output

config = spack.compilers.get_compiler_config("site", False)
config = spack.compilers.get_compiler_config(
no_compilers_yaml, scope="site", init_config=False
)
clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")
gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")

@@ -210,7 +212,9 @@ def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compiler
assert "clang@11.0.0" in output
assert "gcc@8.4.0" in output

config = spack.compilers.get_compiler_config("site", False)
config = spack.compilers.get_compiler_config(
no_compilers_yaml, scope="site", init_config=False
)
clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")

assert clang["paths"]["cc"] == str(compilers_dir / "clang")
@@ -229,7 +233,9 @@ def test_compiler_find_path_order(no_compilers_yaml, working_env, compilers_dir)

compiler("find", "--scope=site")

config = spack.compilers.get_compiler_config("site", False)
config = spack.compilers.get_compiler_config(
no_compilers_yaml, scope="site", init_config=False
)
gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")
assert gcc["paths"] == {
"cc": str(new_dir / "gcc-8"),
@@ -858,8 +858,7 @@ def test_with_config_bad_include_activate(environment_from_manifest, tmpdir):
"""
)

e = ev.Environment(env_root)
with e:
with ev.Environment(env_root) as e:
e.concretize()

# we've created an environment with some included config files (which do
@@ -869,7 +868,7 @@ def test_with_config_bad_include_activate(environment_from_manifest, tmpdir):
os.remove(abs_include_path)
os.remove(include1)
with pytest.raises(spack.config.ConfigFileError) as exc:
ev.activate(e)
ev.activate(ev.Environment(env_root))

err = exc.value.message
assert "missing include" in err
@@ -1063,8 +1062,7 @@ def test_config_change_new(mutable_mock_env_path, tmp_path, mock_packages, mutab
"""
)

e = ev.Environment(tmp_path)
with e:
with ev.Environment(tmp_path):
config("change", "packages:mpich:require:~debug")
with pytest.raises(spack.solver.asp.UnsatisfiableSpecError):
spack.spec.Spec("mpich+debug").concretized()
@@ -1081,7 +1079,7 @@ def test_config_change_new(mutable_mock_env_path, tmp_path, mock_packages, mutab
require: "@3.0.3"
"""
)
with e:
with ev.Environment(tmp_path):
assert spack.spec.Spec("mpich").concretized().satisfies("@3.0.3")
with pytest.raises(spack.config.ConfigError, match="not a list"):
config("change", "packages:mpich:require:~debug")
@@ -88,6 +88,7 @@ def __init__(
exclude_file=None,
exclude_specs=None,
directory=None,
private=False,
):
self.specs = specs or []
self.all = all
@@ -96,6 +97,7 @@ def __init__(
self.dependencies = dependencies
self.exclude_file = exclude_file
self.exclude_specs = exclude_specs
self.private = private
self.directory = directory


@@ -104,7 +106,7 @@ def test_exclude_specs(mock_packages, config):
specs=["mpich"], versions_per_spec="all", exclude_specs="mpich@3.0.1:3.0.2 mpich@1.0"
)

mirror_specs = spack.cmd.mirror.concrete_specs_from_user(args)
mirror_specs, _ = spack.cmd.mirror._specs_and_action(args)
expected_include = set(
spack.spec.Spec(x).concretized() for x in ["mpich@3.0.3", "mpich@3.0.4", "mpich@3.0"]
)
@@ -113,6 +115,19 @@ def test_exclude_specs(mock_packages, config):
assert not any(spec.satisfies(y) for spec in mirror_specs for y in expected_exclude)


def test_exclude_specs_public_mirror(mock_packages, config):
args = MockMirrorArgs(
specs=["no-redistribute-dependent"],
versions_per_spec="all",
dependencies=True,
private=False,
)

mirror_specs, _ = spack.cmd.mirror._specs_and_action(args)
assert not any(s.name == "no-redistribute" for s in mirror_specs)
assert any(s.name == "no-redistribute-dependent" for s in mirror_specs)


def test_exclude_file(mock_packages, tmpdir, config):
exclude_path = os.path.join(str(tmpdir), "test-exclude.txt")
with open(exclude_path, "w") as exclude_file:
@@ -125,7 +140,7 @@ def test_exclude_file(mock_packages, tmpdir, config):

args = MockMirrorArgs(specs=["mpich"], versions_per_spec="all", exclude_file=exclude_path)

mirror_specs = spack.cmd.mirror.concrete_specs_from_user(args)
mirror_specs, _ = spack.cmd.mirror._specs_and_action(args)
expected_include = set(
spack.spec.Spec(x).concretized() for x in ["mpich@3.0.3", "mpich@3.0.4", "mpich@3.0"]
)
@@ -262,11 +277,9 @@ def test_mirror_destroy(
class TestMirrorCreate:
@pytest.mark.regression("31736", "31985")
def test_all_specs_with_all_versions_dont_concretize(self):
args = MockMirrorArgs(exclude_file=None, exclude_specs=None)
specs = spack.cmd.mirror.all_specs_with_all_versions(
selection_fn=spack.cmd.mirror.not_excluded_fn(args)
)
assert all(not s.concrete for s in specs)
args = MockMirrorArgs(all=True, exclude_file=None, exclude_specs=None)
mirror_specs, _ = spack.cmd.mirror._specs_and_action(args)
assert all(not s.concrete for s in mirror_specs)

@pytest.mark.parametrize(
"cli_args,error_str",
@@ -324,8 +337,8 @@ def test_error_conditions(self, cli_args, error_str):
],
)
def test_exclude_specs_from_user(self, cli_args, not_expected, config):
specs = spack.cmd.mirror.concrete_specs_from_user(MockMirrorArgs(**cli_args))
assert not any(s.satisfies(y) for s in specs for y in not_expected)
mirror_specs, _ = spack.cmd.mirror._specs_and_action(MockMirrorArgs(**cli_args))
assert not any(s.satisfies(y) for s in mirror_specs for y in not_expected)

@pytest.mark.parametrize("abstract_specs", [("bowtie", "callpath")])
def test_specs_from_cli_are_the_same_as_from_file(self, abstract_specs, config, tmpdir):
@@ -407,3 +420,27 @@ def test_mirror_add_set_signed(mutable_config):
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "signed": False}
mirror("set", "--signed", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "signed": True}


def test_mirror_add_set_autopush(mutable_config):
# Add mirror without autopush
mirror("add", "example", "http://example.com")
assert spack.config.get("mirrors:example") == "http://example.com"
mirror("set", "--no-autopush", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": False}
mirror("set", "--autopush", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": True}
mirror("set", "--no-autopush", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": False}
mirror("remove", "example")

# Add mirror with autopush
mirror("add", "--autopush", "example", "http://example.com")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": True}
mirror("set", "--autopush", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": True}
mirror("set", "--no-autopush", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": False}
mirror("set", "--autopush", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": True}
mirror("remove", "example")
@@ -14,6 +14,7 @@
import spack.compilers
import spack.spec
import spack.util.environment
import spack.util.module_cmd
from spack.compiler import Compiler
from spack.util.executable import Executable, ProcessError

@@ -137,14 +138,6 @@ def __init__(self):
environment={},
)

def _get_compiler_link_paths(self):
# Mock os.path.isdir so the link paths don't have to exist
old_isdir = os.path.isdir
os.path.isdir = lambda x: True
ret = super()._get_compiler_link_paths()
os.path.isdir = old_isdir
return ret

@property
def name(self):
return "mockcompiler"
@@ -162,34 +155,25 @@ def verbose_flag(self):
required_libs = ["libgfortran"]


def test_implicit_rpaths(dirs_with_libfiles, monkeypatch):
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
def test_implicit_rpaths(dirs_with_libfiles):
lib_to_dirs, all_dirs = dirs_with_libfiles

def try_all_dirs(*args):
return all_dirs

monkeypatch.setattr(MockCompiler, "_get_compiler_link_paths", try_all_dirs)

expected_rpaths = set(lib_to_dirs["libstdc++"] + lib_to_dirs["libgfortran"])

compiler = MockCompiler()
compiler._compile_c_source_output = "ld " + " ".join(f"-L{d}" for d in all_dirs)
retrieved_rpaths = compiler.implicit_rpaths()
assert set(retrieved_rpaths) == expected_rpaths
assert set(retrieved_rpaths) == set(lib_to_dirs["libstdc++"] + lib_to_dirs["libgfortran"])

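The mock output strings below follow the "ld -L<dir> ..." shape; extracting search directories from such verbose link lines can be sketched as:

import re

def link_dirs(verbose_output: str):
    # pull the argument of every -L flag out of a linker invocation line
    return re.findall(r"-L(\S+)", verbose_output)

assert link_dirs("ld -L/path/to/first/lib -L/path/to/second/lib64") == [
    "/path/to/first/lib",
    "/path/to/second/lib64",
]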
no_flag_dirs = ["/path/to/first/lib", "/path/to/second/lib64"]
no_flag_output = "ld -L%s -L%s" % tuple(no_flag_dirs)

flag_dirs = ["/path/to/first/with/flag/lib", "/path/to/second/lib64"]
flag_output = "ld -L%s -L%s" % tuple(flag_dirs)
without_flag_output = "ld -L/path/to/first/lib -L/path/to/second/lib64"
with_flag_output = "ld -L/path/to/first/with/flag/lib -L/path/to/second/lib64"


def call_compiler(exe, *args, **kwargs):
# This method can replace Executable.__call__ to emulate a compiler that
# changes libraries depending on a flag.
if "--correct-flag" in exe.exe:
return flag_output
return no_flag_output
return with_flag_output
return without_flag_output


@pytest.mark.not_on_windows("Not supported on Windows (yet)")
@@ -203,8 +187,8 @@ def call_compiler(exe, *args, **kwargs):
("cc", "cppflags"),
],
)
@pytest.mark.enable_compiler_link_paths
def test_get_compiler_link_paths(monkeypatch, exe, flagname):
@pytest.mark.enable_compiler_execution
def test_compile_dummy_c_source_adds_flags(monkeypatch, exe, flagname):
# create fake compiler that emits mock verbose output
compiler = MockCompiler()
monkeypatch.setattr(Executable, "__call__", call_compiler)
@@ -221,40 +205,38 @@ def test_get_compiler_link_paths(monkeypatch, exe, flagname):
assert False

# Test without flags
assert compiler._get_compiler_link_paths() == no_flag_dirs
assert compiler._compile_dummy_c_source() == without_flag_output

if flagname:
# set flags and test
compiler.flags = {flagname: ["--correct-flag"]}
assert compiler._get_compiler_link_paths() == flag_dirs
assert compiler._compile_dummy_c_source() == with_flag_output


def test_get_compiler_link_paths_no_path():
@pytest.mark.enable_compiler_execution
def test_compile_dummy_c_source_no_path():
compiler = MockCompiler()
compiler.cc = None
compiler.cxx = None
compiler.f77 = None
compiler.fc = None
assert compiler._get_compiler_link_paths() == []
assert compiler._compile_dummy_c_source() is None


def test_get_compiler_link_paths_no_verbose_flag():
@pytest.mark.enable_compiler_execution
def test_compile_dummy_c_source_no_verbose_flag():
compiler = MockCompiler()
compiler._verbose_flag = None
assert compiler._get_compiler_link_paths() == []
assert compiler._compile_dummy_c_source() is None


@pytest.mark.not_on_windows("Not supported on Windows (yet)")
@pytest.mark.enable_compiler_link_paths
def test_get_compiler_link_paths_load_env(working_env, monkeypatch, tmpdir):
@pytest.mark.enable_compiler_execution
def test_compile_dummy_c_source_load_env(working_env, monkeypatch, tmpdir):
gcc = str(tmpdir.join("gcc"))
with open(gcc, "w") as f:
f.write(
"""#!/bin/sh
f"""#!/bin/sh
if [ "$ENV_SET" = "1" ] && [ "$MODULE_LOADED" = "1" ]; then
echo '"""
+ no_flag_output
+ """'
printf '{without_flag_output}'
fi
"""
)
@@ -274,7 +256,7 @@ def module(*args):
compiler.environment = {"set": {"ENV_SET": "1"}}
compiler.modules = ["turn_on"]

assert compiler._get_compiler_link_paths() == no_flag_dirs
assert compiler._compile_dummy_c_source() == without_flag_output


# Get the desired flag from the specified compiler spec.
@@ -700,7 +682,7 @@ def test_raising_if_compiler_target_is_over_specific(config):
]
arch_spec = spack.spec.ArchSpec(("linux", "ubuntu18.04", "haswell"))
with spack.config.override("compilers", compilers):
cfg = spack.compilers.get_compiler_config()
cfg = spack.compilers.get_compiler_config(config)
with pytest.raises(ValueError):
spack.compilers.get_compilers(cfg, spack.spec.CompilerSpec("gcc@9.0.1"), arch_spec)

@@ -912,3 +894,52 @@ def prepare_executable(name):
# Test that null entries don't fail
compiler.cc = None
compiler.verify_executables()


@pytest.mark.parametrize(
"detected_versions,expected_length",
[
# If we detect a C compiler we expect the result to be valid
(
[
spack.compilers.DetectVersionArgs(
id=spack.compilers.CompilerID(
os="ubuntu20.04", compiler_name="clang", version="12.0.0"
),
variation=spack.compilers.NameVariation(prefix="", suffix="-12"),
language="cc",
path="/usr/bin/clang-12",
),
spack.compilers.DetectVersionArgs(
id=spack.compilers.CompilerID(
os="ubuntu20.04", compiler_name="clang", version="12.0.0"
),
variation=spack.compilers.NameVariation(prefix="", suffix="-12"),
language="cxx",
path="/usr/bin/clang++-12",
),
],
1,
),
# If we detect only a C++ compiler we expect the result to be discarded
(
[
spack.compilers.DetectVersionArgs(
id=spack.compilers.CompilerID(
os="ubuntu20.04", compiler_name="clang", version="12.0.0"
),
variation=spack.compilers.NameVariation(prefix="", suffix="-12"),
language="cxx",
path="/usr/bin/clang++-12",
)
],
0,
),
],
)
def test_detection_requires_c_compiler(detected_versions, expected_length):
"""Tests that compilers automatically added to the configuration have
at least a C compiler.
"""
result = spack.compilers.make_compiler_list(detected_versions)
assert len(result) == expected_length
@@ -13,6 +13,7 @@

import llnl.util.lang

import spack.compiler
import spack.compilers
import spack.concretize
import spack.config
@@ -23,6 +24,7 @@
import spack.platforms
import spack.repo
import spack.solver.asp
import spack.util.libc
import spack.variant as vt
from spack.concretize import find_spec
from spack.spec import CompilerSpec, Spec
@@ -67,6 +69,24 @@ def check_concretize(abstract_spec):
return concrete


@pytest.fixture(scope="function", autouse=True)
def binary_compatibility(monkeypatch, request):
"""Selects whether we use OS compatibility for binaries, or libc compatibility."""
if spack.platforms.real_host().name != "linux":
return

if "mock_packages" not in request.fixturenames:
# Only builtin.mock has a mock glibc package
return

if "database" in request.fixturenames or "mutable_database" in request.fixturenames:
# Databases have been created without glibc support
return

monkeypatch.setattr(spack.solver.asp, "using_libc_compatibility", lambda: True)
monkeypatch.setattr(spack.compiler.Compiler, "default_libc", Spec("glibc@=2.28"))


@pytest.fixture(
params=[
# no_deps
@@ -120,14 +140,16 @@ def current_host(request, monkeypatch):
# is_preference is not empty if we want to supply the
# preferred target via packages.yaml
cpu, _, is_preference = request.param.partition("-")
target = archspec.cpu.TARGETS[cpu]

monkeypatch.setattr(spack.platforms.Test, "default", cpu)
monkeypatch.setattr(spack.platforms.Test, "front_end", cpu)
if not is_preference:
target = archspec.cpu.TARGETS[cpu]
monkeypatch.setattr(archspec.cpu, "host", lambda: target)
yield target
else:
target = archspec.cpu.TARGETS["sapphirerapids"]
monkeypatch.setattr(archspec.cpu, "host", lambda: target)
with spack.config.override("packages:all", {"target": [cpu]}):
yield target

@@ -874,7 +896,7 @@ def test_concretize_anonymous_dep(self, spec_str):
@pytest.mark.parametrize(
"spec_str,expected_str",
[
# Unconstrained versions select default compiler (gcc@4.5.0)
# Unconstrained versions select default compiler (gcc@10.2.1)
("bowtie@1.4.0", "%gcc@10.2.1"),
# Version with conflicts and no valid gcc select another compiler
("bowtie@1.3.0", "%clang@15.0.0"),
@@ -1012,7 +1034,7 @@ def test_working_around_conflicting_defaults(self, spec_str, expected):
[("cmake", ["%clang"]), ("cmake %gcc", ["%gcc"]), ("cmake %clang", ["%clang"])],
)
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_external_package_and_compiler_preferences(self, spec_str, expected):
def test_external_package_and_compiler_preferences(self, spec_str, expected, mutable_config):
packages_yaml = {
"all": {"compiler": ["clang", "gcc"]},
"cmake": {
@@ -1020,7 +1042,7 @@ def test_external_package_and_compiler_preferences(self, spec_str, expected):
"buildable": False,
},
}
spack.config.set("packages", packages_yaml)
mutable_config.set("packages", packages_yaml)
s = Spec(spec_str).concretized()

assert s.external
@@ -1325,6 +1347,9 @@ def mock_fn(*args, **kwargs):
def test_reuse_installed_packages_when_package_def_changes(
self, context, mutable_database, repo_with_changing_recipe
):
# test applies only with reuse turned off in concretizer
spack.config.set("concretizer:reuse", False)

# Install a spec
root = Spec("root").concretized()
dependency = root["changing"].copy()
@@ -1348,6 +1373,22 @@ def test_reuse_installed_packages_when_package_def_changes(
# Structure and package hash will be different without reuse
assert root.dag_hash() != new_root_without_reuse.dag_hash()

@pytest.mark.only_clingo("Use case not supported by the original concretizer")
@pytest.mark.regression("43663")
def test_no_reuse_when_variant_condition_does_not_hold(self, mutable_database, mock_packages):
spack.config.set("concretizer:reuse", True)

# Install a spec for which the `version_based` variant condition does not hold
old = Spec("conditional-variant-pkg @1").concretized()
old.package.do_install(fake=True, explicit=True)

# Then explicitly require a spec with `+version_based`, which shouldn't reuse previous spec
new1 = Spec("conditional-variant-pkg +version_based").concretized()
assert new1.satisfies("@2 +version_based")

new2 = Spec("conditional-variant-pkg +two_whens").concretized()
assert new2.satisfies("@2 +two_whens +version_based")

@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_reuse_with_flags(self, mutable_database, mutable_config):
spack.config.set("concretizer:reuse", True)
@@ -1434,6 +1475,8 @@ def test_os_selection_when_multiple_choices_are_possible(
):
s = Spec(spec_str).concretized()
for node in s.traverse():
if node.name == "glibc":
continue
assert node.satisfies(expected_os)

@pytest.mark.regression("22718")
@@ -1746,7 +1789,8 @@ def test_best_effort_coconcretize(self, specs, expected):
for s in result.specs:
concrete_specs.update(s.traverse())

assert len(concrete_specs) == expected
libc_offset = 1 if spack.solver.asp.using_libc_compatibility() else 0
assert len(concrete_specs) == expected + libc_offset

@pytest.mark.parametrize(
"specs,expected_spec,occurances",
@@ -1866,32 +1910,16 @@ def test_version_weight_and_provenance(self):
result_spec = result.specs[0]
num_specs = len(list(result_spec.traverse()))

libc_offset = 1 if spack.solver.asp.using_libc_compatibility() else 0
criteria = [
(num_specs - 1, None, "number of packages to build (vs. reuse)"),
(num_specs - 1 - libc_offset, None, "number of packages to build (vs. reuse)"),
(2, 0, "version badness"),
]

for criterion in criteria:
assert criterion in result.criteria
assert criterion in result.criteria, result_spec
assert result_spec.satisfies("^b@1.0")

@pytest.mark.regression("31169")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_not_reusing_incompatible_os_or_compiler(self):
root_spec = Spec("b")
s = root_spec.concretized()
wrong_compiler, wrong_os = s.copy(), s.copy()
wrong_compiler.compiler = spack.spec.CompilerSpec("gcc@12.1.0")
wrong_os.architecture = spack.spec.ArchSpec("test-ubuntu2204-x86_64")
reusable_specs = [wrong_compiler, wrong_os]
with spack.config.override("concretizer:reuse", True):
solver = spack.solver.asp.Solver()
setup = spack.solver.asp.SpackSolverSetup()
result, _, _ = solver.driver.solve(setup, [root_spec], reuse=reusable_specs)
concrete_spec = result.specs[0]
assert concrete_spec.satisfies("%{}".format(s.compiler))
assert concrete_spec.satisfies("os={}".format(s.architecture.os))

@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_reuse_succeeds_with_config_compatible_os(self):
root_spec = Spec("b")
@@ -2096,7 +2124,11 @@ def test_external_python_extension_find_dependency_from_installed(self, monkeypa

# install python external
python = Spec("python").concretized()
monkeypatch.setattr(spack.store.STORE.db, "query", lambda x: [python])

def query(*args, **kwargs):
return [python]

monkeypatch.setattr(spack.store.STORE.db, "query", query)

# ensure that we can't be faking this by getting it from config
external_conf.pop("python")
@@ -2360,6 +2392,62 @@ def test_select_lower_priority_package_from_repository_stack(
assert s[name].concrete
assert s[name].namespace == namespace

@pytest.mark.only_clingo("Old concretizer cannot reuse")
def test_reuse_specs_from_non_available_compilers(self, mutable_config, mutable_database):
"""Tests that we can reuse specs with compilers that are not configured locally."""
# All the specs in the mutable DB have been compiled with %gcc@=10.2.1
specs = mutable_database.query_local()
assert all(s.satisfies("%gcc@=10.2.1") for s in specs)

spack.compilers.remove_compiler_from_config("gcc@=10.2.1")
assert not spack.compilers.compilers_for_spec("gcc@=10.2.1")
mutable_config.set("concretizer:reuse", True)

# mpileaks is in the database, it will be reused with gcc@=10.2.1
root = Spec("mpileaks").concretized()
for s in root.traverse():
assert s.satisfies("%gcc@10.2.1")

# fftw is not in the database, therefore the root will be compiled with gcc@=9.4.0,
# while the mpi is reused from the database and is compiled with gcc@=10.2.1
root = Spec("fftw").concretized()
assert root.satisfies("%gcc@=9.4.0")
for s in root.traverse(root=False):
assert s.satisfies("%gcc@10.2.1")

@pytest.mark.regression("43406")
def test_externals_with_platform_explicitly_set(self, tmp_path):
"""Tests that users can specify platform=xxx in an external spec"""
external_conf = {
"mpich": {
"buildable": False,
"externals": [{"spec": "mpich@=2.0.0 platform=test", "prefix": str(tmp_path)}],
}
}
spack.config.set("packages", external_conf)
s = Spec("mpich").concretized()
assert s.external

@pytest.mark.regression("43875")
def test_concretize_missing_compiler(self, mutable_config, monkeypatch):
"""Tests that Spack can concretize a spec with a missing compiler when the
option is active.
"""

def _default_libc(self):
if self.cc is None:
return None
return Spec("glibc@=2.28")

monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False)
monkeypatch.setattr(spack.compiler.Compiler, "default_libc", property(_default_libc))
monkeypatch.setattr(
spack.util.libc, "libc_from_current_python_process", lambda: Spec("glibc@=2.28")
)
mutable_config.set("config:install_missing_compilers", True)
s = Spec("a %gcc@=13.2.0").concretized()
assert s.satisfies("%gcc@13.2.0")


@pytest.fixture()
def duplicates_test_repository():
@@ -2498,6 +2586,29 @@ def test_no_multiple_solutions_with_different_edges_same_nodes(self):
assert len(edges) == 1
assert edges[0].spec.satisfies("@=60")

@pytest.mark.regression("43647")
def test_specifying_different_versions_build_deps(self):
"""Tests that we can concretize a spec with nodes using the same build
dependency pinned at different versions, when the constraint is specified
in the root spec.

o hdf5@1.0
|\
o | pinned-gmake@1.0
o | gmake@3.0
|/
o gmake@4.1

"""
hdf5_str = "hdf5@1.0 ^gmake@4.1"
pinned_str = "pinned-gmake@1.0 ^gmake@3.0"
input_specs = [Spec(hdf5_str), Spec(pinned_str)]
solver = spack.solver.asp.Solver()
result = solver.solve(input_specs)

assert any(x.satisfies(hdf5_str) for x in result.specs)
assert any(x.satisfies(pinned_str) for x in result.specs)


@pytest.mark.parametrize(
"v_str,v_opts,checksummed",
@@ -977,7 +977,7 @@ def test_single_file_scope(config, env_yaml):
    # from the single-file config
    assert spack.config.get("config:verify_ssl") is False
    assert spack.config.get("config:dirty") is False
    assert spack.config.get("packages:all:compiler") == ["gcc@4.5.3"]
    assert spack.config.get("packages:all:compiler") == ["gcc@4.5.3", "gcc", "clang"]

    # from the lower config scopes
    assert spack.config.get("config:checksum") is True
@@ -1276,7 +1276,7 @@ def test_user_config_path_is_default_when_env_var_is_empty(working_env):

def test_default_install_tree(monkeypatch, default_config):
    s = spack.spec.Spec("nonexistent@x.y.z %none@a.b.c arch=foo-bar-baz")
    monkeypatch.setattr(s, "dag_hash", lambda: "abc123")
    monkeypatch.setattr(s, "dag_hash", lambda length: "abc123")
    _, _, projections = spack.store.parse_install_tree(spack.config.get("config"))
    assert s.format(projections["all"]) == "foo-bar-baz/none-a.b.c/nonexistent-x.y.z-abc123"
@@ -34,6 +34,7 @@
import spack.binary_distribution
import spack.caches
import spack.cmd.buildcache
import spack.compiler
import spack.compilers
import spack.config
import spack.database
@@ -55,6 +56,7 @@
import spack.util.gpg
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.version
from spack.fetch_strategy import URLFetchStrategy
from spack.util.pattern import Bunch

@@ -268,10 +270,6 @@ def clean_test_environment():
    ev.deactivate()


def _verify_executables_noop(*args):
    return None


def _host():
    """Mock archspec host so there is no inconsistency on the Windows platform
    This function cannot be local as it needs to be pickleable"""
@@ -297,9 +295,7 @@ def mock_compiler_executable_verification(request, monkeypatch):

    If a test is marked in that way this is a no-op."""
    if "enable_compiler_verification" not in request.keywords:
        monkeypatch.setattr(
            spack.compiler.Compiler, "verify_executables", _verify_executables_noop
        )
        monkeypatch.setattr(spack.compiler.Compiler, "verify_executables", _return_none)


# Hooks to add command line options or set other custom behaviors.
@@ -790,6 +786,7 @@ def no_compilers_yaml(mutable_config):
    compilers_yaml = os.path.join(local_config.path, "compilers.yaml")
    if os.path.exists(compilers_yaml):
        os.remove(compilers_yaml)
    return mutable_config


@pytest.fixture()
@@ -932,26 +929,16 @@ def dirs_with_libfiles(tmpdir_factory):
    yield lib_to_dirs, all_dirs


def _compiler_link_paths_noop(*args):
    return []
def _return_none(*args):
    return None


@pytest.fixture(scope="function", autouse=True)
def disable_compiler_execution(monkeypatch, request):
    """
    This fixture can be disabled for tests of the compiler link path
    functionality by::

        @pytest.mark.enable_compiler_link_paths

    If a test is marked in that way this is a no-op."""
    if "enable_compiler_link_paths" not in request.keywords:
        # Compiler.determine_implicit_rpaths actually runs the compiler. So
        # replace that function with a noop that simulates finding no implicit
        # RPATHs
        monkeypatch.setattr(
            spack.compiler.Compiler, "_get_compiler_link_paths", _compiler_link_paths_noop
        )
    """Disable compiler execution to determine implicit link paths and libc flavor and version.

    To re-enable use `@pytest.mark.enable_compiler_execution`"""
    if "enable_compiler_execution" not in request.keywords:
        monkeypatch.setattr(spack.compiler.Compiler, "_compile_dummy_c_source", _return_none)


@pytest.fixture(scope="function")
@@ -1439,6 +1426,15 @@ def mock_git_repository(git, tmpdir_factory):
    yield t


@pytest.fixture(scope="function")
def mock_git_test_package(mock_git_repository, mutable_mock_repo, monkeypatch):
    # install a fake git version in the package class
    pkg_class = spack.repo.PATH.get_pkg_class("git-test")
    monkeypatch.delattr(pkg_class, "git")
    monkeypatch.setitem(pkg_class.versions, spack.version.Version("git"), mock_git_repository.url)
    return pkg_class


@pytest.fixture(scope="session")
def mock_hg_repository(tmpdir_factory):
    """Creates a very simple hg repository with two commits."""
@@ -1970,17 +1966,24 @@ def mock_modules_root(tmp_path, monkeypatch):
    monkeypatch.setattr(spack.modules.common, "root_path", fn)


_repo_name_id = 0


def create_test_repo(tmpdir, pkg_name_content_tuples):
    global _repo_name_id

    repo_path = str(tmpdir)
    repo_yaml = tmpdir.join("repo.yaml")
    with open(str(repo_yaml), "w") as f:
        f.write(
            """\
            f"""\
repo:
  namespace: testcfgrequirements
  namespace: testrepo{str(_repo_name_id)}
"""
        )

    _repo_name_id += 1

    packages_dir = tmpdir.join("packages")
    for pkg_name, pkg_str in pkg_name_content_tuples:
        pkg_dir = packages_dir.ensure(pkg_name, dir=True)
@@ -1,5 +1,5 @@
concretizer:
  # reuse is missing on purpose, see "test_concretizer_arguments"
  reuse: True
  targets:
    granularity: microarchitectures
    host_compatible: false

@@ -1,5 +1,6 @@
packages:
  all:
    compiler: [gcc, clang]
    providers:
      mpi: [openmpi, mpich, zmpi]
      lapack: [openblas-with-lapack]
@@ -1106,3 +1106,31 @@ def test_database_construction_doesnt_use_globals(tmpdir, config, nullify_global
    lock_cfg = lock_cfg or spack.database.lock_configuration(config)
    db = spack.database.Database(str(tmpdir), lock_cfg=lock_cfg)
    assert os.path.exists(db.database_directory)


def test_database_read_works_with_trailing_data(tmp_path, default_mock_concretization):
    # Populate a database
    root = str(tmp_path)
    db = spack.database.Database(root)
    spec = default_mock_concretization("a")
    db.add(spec, directory_layout=None)
    specs_in_db = db.query_local()
    assert spec in specs_in_db

    # Append anything to the end of the database file
    with open(db._index_path, "a") as f:
        f.write(json.dumps({"hello": "world"}))

    # Read the database and check that it ignores the trailing data
    assert spack.database.Database(root).query_local() == specs_in_db


def test_database_errors_with_just_a_version_key(tmp_path):
    root = str(tmp_path)
    db = spack.database.Database(root)
    next_version = f"{spack.database._DB_VERSION}.next"
    with open(db._index_path, "w") as f:
        f.write(json.dumps({"database": {"version": next_version}}))

    with pytest.raises(spack.database.InvalidDatabaseVersionError):
        spack.database.Database(root).query_local()
@@ -10,6 +10,7 @@
import spack.repo
import spack.spec
import spack.version
from spack.test.conftest import create_test_repo


def test_false_directives_do_not_exist(mock_packages):
@@ -142,3 +143,86 @@ def test_version_type_validation():
    # Try passing a bogus type; it's just that we want a nice error message
    with pytest.raises(spack.version.VersionError, match=msg):
        spack.directives._execute_version(package(name="python"), {})


_pkgx = (
    "x",
    """\
class X(Package):
    version("1.3")
    version("1.2")
    version("1.1")
    version("1.0")

    variant("foo", default=False)

    redistribute(binary=False, when="@1.1")
    redistribute(binary=False, when="@1.0:1.2+foo")
    redistribute(source=False, when="@1.0:1.2")
""",
)


_pkgy = (
    "y",
    """\
class Y(Package):
    version("2.1")
    version("2.0")

    variant("bar", default=False)

    redistribute(binary=False, source=False)
""",
)


@pytest.fixture
def _create_test_repo(tmpdir, mutable_config):
    yield create_test_repo(tmpdir, [_pkgx, _pkgy])


@pytest.fixture
def test_repo(_create_test_repo, monkeypatch, mock_stage):
    with spack.repo.use_repositories(_create_test_repo) as mock_repo_path:
        yield mock_repo_path


@pytest.mark.parametrize(
    "spec_str,distribute_src,distribute_bin",
    [
        ("x@1.1~foo", False, False),
        ("x@1.2+foo", False, False),
        ("x@1.2~foo", False, True),
        ("x@1.0~foo", False, True),
        ("x@1.3+foo", True, True),
        ("y@2.0", False, False),
        ("y@2.1+bar", False, False),
    ],
)
def test_redistribute_directive(test_repo, spec_str, distribute_src, distribute_bin):
    spec = spack.spec.Spec(spec_str)
    assert spec.package_class.redistribute_source(spec) == distribute_src
    concretized_spec = spec.concretized()
    assert concretized_spec.package.redistribute_binary == distribute_bin


def test_redistribute_override_when():
    """Allow a user to call `redistribute` twice to separately disable
    source and binary distribution for the same when spec.

    The second call should not undo the effect of the first.
    """

    class MockPackage:
        name = "mock"
        disable_redistribute = {}

    cls = MockPackage
    spack.directives._execute_redistribute(cls, source=False, when="@1.0")
    spec_key = spack.directives._make_when_spec("@1.0")
    assert not cls.disable_redistribute[spec_key].binary
    assert cls.disable_redistribute[spec_key].source
    spack.directives._execute_redistribute(cls, binary=False, when="@1.0")
    assert cls.disable_redistribute[spec_key].binary
    assert cls.disable_redistribute[spec_key].source
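For reference, the directive exercised above is written in a package recipe just like the mock packages `_pkgx` and `_pkgy`; a minimal sketch (the `Example` package is hypothetical):

    class Example(Package):
        version("2.0")
        version("1.0")

        # binaries of the old release may not be redistributed; sources still may
        redistribute(binary=False, when="@1.0")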
@@ -9,6 +9,7 @@
import shutil
import stat
import sys
from contextlib import contextmanager

import pytest

@@ -908,3 +909,129 @@ def test_find_first_file(tmpdir, bfs_depth):

    # Should find first dir
    assert os.path.samefile(fs.find_first(root, "a", bfs_depth=bfs_depth), os.path.join(root, "a"))


def test_rename_dest_exists(tmpdir):
    @contextmanager
    def setup_test_files():
        a = tmpdir.join("a", "file1")
        b = tmpdir.join("a", "file2")
        fs.touchp(a)
        fs.touchp(b)
        with open(a, "w") as oa, open(b, "w") as ob:
            oa.write("I am A")
            ob.write("I am B")
        yield a, b
        shutil.rmtree(tmpdir.join("a"))

    @contextmanager
    def setup_test_dirs():
        a = tmpdir.join("d", "a")
        b = tmpdir.join("d", "b")
        fs.mkdirp(a)
        fs.mkdirp(b)
        yield a, b
        shutil.rmtree(tmpdir.join("d"))

    # test standard behavior of rename
    # smoke test
    with setup_test_files() as files:
        a, b = files
        fs.rename(str(a), str(b))
        assert os.path.exists(b)
        assert not os.path.exists(a)
        with open(b, "r") as ob:
            content = ob.read()
        assert content == "I am A"

    # test relative paths
    # another sanity check/smoke test
    with setup_test_files() as files:
        a, b = files
        with fs.working_dir(str(tmpdir)):
            fs.rename(os.path.join("a", "file1"), os.path.join("a", "file2"))
        assert os.path.exists(b)
        assert not os.path.exists(a)
        with open(b, "r") as ob:
            content = ob.read()
        assert content == "I am A"

    # test renaming symlinks to the same file
    c = tmpdir.join("a", "file1")
    a = tmpdir.join("a", "link1")
    b = tmpdir.join("a", "link2")
    fs.touchp(c)
    symlink(c, a)
    symlink(c, b)
    fs.rename(str(a), str(b))
    assert os.path.exists(b)
    assert not os.path.exists(a)
    assert os.path.realpath(b) == c
    shutil.rmtree(tmpdir.join("a"))

    # test rename onto itself
    a = tmpdir.join("a", "file1")
    b = a
    fs.touchp(a)
    with open(a, "w") as oa:
        oa.write("I am A")
    fs.rename(str(a), str(b))
    # checking a or b doesn't matter, they are the same file
    assert os.path.exists(a)
    # ensure the original file was not duplicated
    assert len(os.listdir(tmpdir.join("a"))) == 1
    with open(a, "r") as oa:
        assert oa.read()
    shutil.rmtree(tmpdir.join("a"))

    # test renaming a symlink to a directory onto another symlink
    # to a directory (this is something spack does when regenerating views)
    with setup_test_dirs() as dirs:
        a, b = dirs
        link1 = tmpdir.join("f", "link1")
        link2 = tmpdir.join("f", "link2")
        fs.mkdirp(tmpdir.join("f"))
        symlink(a, link1)
        symlink(b, link2)
        fs.rename(str(link1), str(link2))
        assert os.path.exists(link2)
        assert os.path.realpath(link2) == a
        shutil.rmtree(tmpdir.join("f"))


@pytest.mark.skipif(sys.platform != "win32", reason="No-op on non Windows")
def test_windows_sfn(tmpdir):
    # first check some standard Windows locations
    # we know require sfn names
    # this is basically a smoke test
    # ensure spaces are replaced + path abbreviated
    assert fs.windows_sfn("C:\\Program Files (x86)") == "C:\\PROGRA~2"
    # ensure path without spaces is still properly shortened
    assert fs.windows_sfn("C:\\ProgramData") == "C:\\PROGRA~3"

    # test user created paths
    # ensure longer path with spaces is properly abbreviated
    a = tmpdir.join("d", "this is a test", "a", "still test")
    # ensure longer path is properly abbreviated
    b = tmpdir.join("d", "long_path_with_no_spaces", "more_long_path")
    # ensure path not in need of abbreviation is properly roundtripped
    c = tmpdir.join("d", "this", "is", "short")
    # ensure paths that are the same in the first six letters
    # are incremented post tilde
    d = tmpdir.join("d", "longerpath1")
    e = tmpdir.join("d", "longerpath2")
    fs.mkdirp(a)
    fs.mkdirp(b)
    fs.mkdirp(c)
    fs.mkdirp(d)
    fs.mkdirp(e)
    # check only the part of the path we can control;
    # the pytest prefix may or may not be mangled by windows_sfn
    # based on user/pytest config
    assert "d\\THISIS~1\\a\\STILLT~1" in fs.windows_sfn(a)
    assert "d\\LONG_P~1\\MORE_L~1" in fs.windows_sfn(b)
    assert "d\\this\\is\\short" in fs.windows_sfn(c)
    assert "d\\LONGER~1" in fs.windows_sfn(d)
    assert "d\\LONGER~2" in fs.windows_sfn(e)
    shutil.rmtree(tmpdir.join("d"))

@@ -146,9 +146,6 @@ def test_autoload_all(self, modulefile_content, module_configuration):

        assert len([x for x in content if "depends_on(" in x]) == 5

    @pytest.mark.skipif(
        str(archspec.cpu.host().family) != "x86_64", reason="test data is specific for x86_64"
    )
    def test_alter_environment(self, modulefile_content, module_configuration):
        """Tests modifications to run-time environment."""

@@ -114,9 +114,6 @@ def test_prerequisites_all(

        assert len([x for x in content if "prereq" in x]) == 5

    @pytest.mark.skipif(
        str(archspec.cpu.host().family) != "x86_64", reason="test data is specific for x86_64"
    )
    def test_alter_environment(self, modulefile_content, module_configuration):
        """Tests modifications to run-time environment."""
lib/spack/spack/test/solver/intermediate.py (new file, 51 lines)
@@ -0,0 +1,51 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Unit tests for objects turning configuration into an intermediate format used by the solver."""
import pytest

import spack.compilers
import spack.config
import spack.spec
from spack.concretize import UnavailableCompilerVersionError
from spack.solver import asp


class TestCompilerParser:
    def test_expected_order_mock_config(self, config):
        """Tests the expected preference order in the mock compiler configuration"""
        parser = asp.CompilerParser(config)
        expected_order = ["gcc@=10.2.1", "gcc@=9.4.0", "gcc@=9.4.0", "clang@=15.0.0"]
        for c, expected in zip(parser.possible_compilers(), expected_order):
            assert c.spec.satisfies(expected)

    @pytest.mark.parametrize("spec_str", ["a %gcc@=13.2.0", "a ^b %gcc@=13.2.0"])
    def test_compiler_from_input_raise(self, spec_str, config):
        """Tests that having an unknown compiler in the input spec raises an exception, if we
        don't allow bootstrapping missing compilers.
        """
        spec = spack.spec.Spec(spec_str)
        with pytest.raises(UnavailableCompilerVersionError):
            asp.CompilerParser(config).with_input_specs([spec])

    def test_compilers_inferred_from_concrete_specs(self, mutable_config, mutable_database):
        """Test that compilers inferred from concrete specs, that are not in the local
        configuration too, are last in the preference order.
        """
        spack.compilers.remove_compiler_from_config("gcc@=10.2.1")
        assert not spack.compilers.compilers_for_spec("gcc@=10.2.1")

        parser = asp.CompilerParser(mutable_config)
        for reuse_spec in mutable_database.query():
            parser.add_compiler_from_concrete_spec(reuse_spec)

        expected_order = [
            ("gcc@=9.4.0", True),
            ("gcc@=9.4.0", True),
            ("clang@=15.0.0", True),
            ("gcc@=10.2.1", False),
        ]
        for c, (expected, available) in zip(parser.possible_compilers(), expected_order):
            assert c.spec.satisfies(expected)
            assert c.available is available
@@ -703,22 +703,25 @@ def check_prop(check_spec, fmt_str, prop, getter):
        actual = spec.format(named_str)
        assert expected == actual

    def test_spec_formatting_escapes(self, default_mock_concretization):
        spec = default_mock_concretization("multivalue-variant cflags=-O2")

        sigil_mismatches = [
    @pytest.mark.parametrize(
        "fmt_str",
        [
            "{@name}",
            "{@version.concrete}",
            "{%compiler.version}",
            "{/hashd}",
            "{arch=architecture.os}",
        ]
        ],
    )
    def test_spec_formatting_sigil_mismatches(self, default_mock_concretization, fmt_str):
        spec = default_mock_concretization("multivalue-variant cflags=-O2")

        for fmt_str in sigil_mismatches:
            with pytest.raises(SpecFormatSigilError):
                spec.format(fmt_str)
        with pytest.raises(SpecFormatSigilError):
            spec.format(fmt_str)

        bad_formats = [
    @pytest.mark.parametrize(
        "fmt_str",
        [
            r"{}",
            r"name}",
            r"\{name}",
@@ -728,11 +731,12 @@ def test_spec_formatting_escapes(self, default_mock_concretization):
            r"{dag_hash}",
            r"{foo}",
            r"{+variants.debug}",
        ]

        for fmt_str in bad_formats:
            with pytest.raises(SpecFormatStringError):
                spec.format(fmt_str)
        ],
    )
    def test_spec_formatting_bad_formats(self, default_mock_concretization, fmt_str):
        spec = default_mock_concretization("multivalue-variant cflags=-O2")
        with pytest.raises(SpecFormatStringError):
            spec.format(fmt_str)

    def test_combination_of_wildcard_or_none(self):
        # Test that using 'none' and another value raises
@@ -1096,22 +1100,22 @@ def test_unsatisfiable_virtual_deps_bindings(self, spec_str):
@pytest.mark.parametrize(
    "spec_str,format_str,expected",
    [
        ("zlib@git.foo/bar", "{name}-{version}", str(pathlib.Path("zlib-git.foo_bar"))),
        ("zlib@git.foo/bar", "{name}-{version}-{/hash}", None),
        ("zlib@git.foo/bar", "{name}/{version}", str(pathlib.Path("zlib", "git.foo_bar"))),
        ("git-test@git.foo/bar", "{name}-{version}", str(pathlib.Path("git-test-git.foo_bar"))),
        ("git-test@git.foo/bar", "{name}-{version}-{/hash}", None),
        ("git-test@git.foo/bar", "{name}/{version}", str(pathlib.Path("git-test", "git.foo_bar"))),
        (
            "zlib@{0}=1.0%gcc".format("a" * 40),
            "git-test@{0}=1.0%gcc".format("a" * 40),
            "{name}/{version}/{compiler}",
            str(pathlib.Path("zlib", "{0}_1.0".format("a" * 40), "gcc")),
            str(pathlib.Path("git-test", "{0}_1.0".format("a" * 40), "gcc")),
        ),
        (
            "zlib@git.foo/bar=1.0%gcc",
            "git-test@git.foo/bar=1.0%gcc",
            "{name}/{version}/{compiler}",
            str(pathlib.Path("zlib", "git.foo_bar_1.0", "gcc")),
            str(pathlib.Path("git-test", "git.foo_bar_1.0", "gcc")),
        ),
    ],
)
def test_spec_format_path(spec_str, format_str, expected):
def test_spec_format_path(spec_str, format_str, expected, mock_git_test_package):
    _check_spec_format_path(spec_str, format_str, expected)


@@ -1129,45 +1133,57 @@ def _check_spec_format_path(spec_str, format_str, expected, path_ctor=None):
    "spec_str,format_str,expected",
    [
        (
            "zlib@git.foo/bar",
            "git-test@git.foo/bar",
            r"C:\\installroot\{name}\{version}",
            r"C:\installroot\zlib\git.foo_bar",
            r"C:\installroot\git-test\git.foo_bar",
        ),
        (
            "zlib@git.foo/bar",
            "git-test@git.foo/bar",
            r"\\hostname\sharename\{name}\{version}",
            r"\\hostname\sharename\zlib\git.foo_bar",
            r"\\hostname\sharename\git-test\git.foo_bar",
        ),
        # leading '/' is preserved on windows but converted to '\'
        # note that it's still not "absolute" -- absolute windows paths start with a drive.
        (
            "git-test@git.foo/bar",
            r"/installroot/{name}/{version}",
            r"\installroot\git-test\git.foo_bar",
        ),
        # Windows doesn't attribute any significance to a leading
        # "/" so it is discarded
        ("zlib@git.foo/bar", r"/installroot/{name}/{version}", r"installroot\zlib\git.foo_bar"),
    ],
)
def test_spec_format_path_windows(spec_str, format_str, expected):
def test_spec_format_path_windows(spec_str, format_str, expected, mock_git_test_package):
    _check_spec_format_path(spec_str, format_str, expected, path_ctor=pathlib.PureWindowsPath)


@pytest.mark.parametrize(
    "spec_str,format_str,expected",
    [
        ("zlib@git.foo/bar", r"/installroot/{name}/{version}", "/installroot/zlib/git.foo_bar"),
        ("zlib@git.foo/bar", r"//installroot/{name}/{version}", "//installroot/zlib/git.foo_bar"),
        (
            "git-test@git.foo/bar",
            r"/installroot/{name}/{version}",
            "/installroot/git-test/git.foo_bar",
        ),
        (
            "git-test@git.foo/bar",
            r"//installroot/{name}/{version}",
            "//installroot/git-test/git.foo_bar",
        ),
        # This is likely unintentional on Linux: Firstly, "\" is not a
        # path separator for POSIX, so this is treated as a single path
        # component (containing literal "\" characters); secondly,
        # Spec.format treats "\" as an escape character, so it is
        # discarded (unless directly following another "\")
        (
            "zlib@git.foo/bar",
            "git-test@git.foo/bar",
            r"C:\\installroot\package-{name}-{version}",
            r"C__installrootpackage-zlib-git.foo_bar",
            r"C__installrootpackage-git-test-git.foo_bar",
        ),
        # "\" is not a POSIX separator, and Spec.format treats "\{" as a literal
        # "{", which means that the resulting format string is invalid
        ("zlib@git.foo/bar", r"package\{name}\{version}", None),
        ("git-test@git.foo/bar", r"package\{name}\{version}", None),
    ],
)
def test_spec_format_path_posix(spec_str, format_str, expected):
def test_spec_format_path_posix(spec_str, format_str, expected, mock_git_test_package):
    _check_spec_format_path(spec_str, format_str, expected, path_ctor=pathlib.PurePosixPath)
@@ -551,12 +551,12 @@ def _specfile_for(spec_str, filename):
        "^[deptypes=build,link] zlib",
    ),
    (
        "zlib@git.foo/bar",
        "git-test@git.foo/bar",
        [
            Token(TokenType.UNQUALIFIED_PACKAGE_NAME, "zlib"),
            Token(TokenType.UNQUALIFIED_PACKAGE_NAME, "git-test"),
            Token(TokenType.GIT_VERSION, "@git.foo/bar"),
        ],
        "zlib@git.foo/bar",
        "git-test@git.foo/bar",
    ),
    # Variant propagation
    (
@@ -585,7 +585,7 @@ def _specfile_for(spec_str, filename):
    ),
    ],
)
def test_parse_single_spec(spec_str, tokens, expected_roundtrip):
def test_parse_single_spec(spec_str, tokens, expected_roundtrip, mock_git_test_package):
    parser = SpecParser(spec_str)
    assert tokens == parser.tokens()
    assert expected_roundtrip == str(parser.next_spec())
lib/spack/spack/test/util/libc.py (new file, 26 lines)
@@ -0,0 +1,26 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest

from spack.util import libc


@pytest.mark.parametrize(
    "libc_prefix,startfile_prefix,expected",
    [
        # Ubuntu
        ("/usr", "/usr/lib/x86_64-linux-gnu", "/usr/include/x86_64-linux-gnu"),
        ("/usr", "/usr/lib/x86_64-linux-musl", "/usr/include/x86_64-linux-musl"),
        ("/usr", "/usr/lib/aarch64-linux-gnu", "/usr/include/aarch64-linux-gnu"),
        ("/usr", "/usr/lib/aarch64-linux-musl", "/usr/include/aarch64-linux-musl"),
        # rhel-like
        ("/usr", "/usr/lib64", "/usr/include"),
        ("/usr", "/usr/lib", "/usr/include"),
    ],
)
@pytest.mark.not_on_windows("The unit test deals with unix-like paths")
def test_header_dir_computation(libc_prefix, startfile_prefix, expected):
    """Tests that we compute the correct header directory from the prefix of the libc startfiles"""
    assert libc.libc_include_dir_from_startfile_prefix(libc_prefix, startfile_prefix) == expected
@@ -641,6 +641,30 @@ def substitute_rpath_and_pt_interp_in_place_or_raise(
    return False


def pt_interp(path: str) -> Optional[str]:
    """Retrieve the interpreter of an executable at `path`."""
    try:
        with open(path, "rb") as f:
            elf = parse_elf(f, interpreter=True)
    except (OSError, ElfParsingError):
        return None

    if not elf.has_pt_interp:
        return None

    return elf.pt_interp_str.decode("utf-8")


def get_elf_compat(path):
    """Get a triplet (EI_CLASS, EI_DATA, e_machine) from an ELF file, which can be used to see if
    two ELF files are compatible."""
    # On platforms supporting ELF, we try to be a bit smarter when it comes to shared
    # libraries, by dropping those that are not host compatible.
    with open(path, "rb") as f:
        elf = parse_elf(f, only_header=True)
    return (elf.is_64_bit, elf.is_little_endian, elf.elf_hdr.e_machine)


class ElfCStringUpdatesFailed(Exception):
    def __init__(
        self, rpath: Optional[UpdateCStringAction], pt_interp: Optional[UpdateCStringAction]
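A short usage sketch for the two helpers above; the paths and values are illustrative, not guaranteed on any particular system:

    interp = pt_interp("/usr/bin/python3")  # e.g. "/lib64/ld-linux-x86-64.so.2", or None
    if interp is not None:
        # (is_64_bit, is_little_endian, e_machine); 62 is EM_X86_64
        print(get_elf_compat(interp))  # e.g. (True, True, 62)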
@@ -36,6 +36,8 @@
SYSTEM_DIRS = [os.path.join(p, s) for s in SUFFIXES for p in SYSTEM_PATHS] + SYSTEM_PATHS

#: used in the compiler wrapper's `/usr/lib|/usr/lib64|...)` case entry
SYSTEM_DIR_CASE_ENTRY = "|".join(sorted(f'"{d}{suff}"' for d in SYSTEM_DIRS for suff in ("", "/")))

_SHELL_SET_STRINGS = {
    "sh": "export {0}={1};\n",
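To make the generated case entry concrete, a small worked example (the two directories stand in for the full SYSTEM_DIRS list):

    dirs = ["/usr/lib", "/usr/lib64"]  # assumed subset of SYSTEM_DIRS
    entry = "|".join(sorted(f'"{d}{suff}"' for d in dirs for suff in ("", "/")))
    # entry == '"/usr/lib"|"/usr/lib/"|"/usr/lib64"|"/usr/lib64/"'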
@@ -642,8 +644,8 @@ def reversed(self) -> "EnvironmentModifications":
        elif isinstance(envmod, AppendFlagsEnv):
            rev.remove_flags(envmod.name, envmod.value)
        else:
            tty.warn(
                f"Skipping reversal of unreversable operation {type(envmod)} {envmod.name}"
            tty.debug(
                f"Skipping reversal of irreversible operation {type(envmod)} {envmod.name}"
            )

    return rev
lib/spack/spack/util/libc.py (new file, 176 lines)
@@ -0,0 +1,176 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import os.path
import re
import shlex
import sys
from subprocess import PIPE, run
from typing import Optional

import spack.spec
import spack.util.elf


def _libc_from_ldd(ldd: str) -> Optional["spack.spec.Spec"]:
    try:
        result = run([ldd, "--version"], stdout=PIPE, stderr=PIPE, check=False)
        stdout = result.stdout.decode("utf-8")
    except Exception:
        return None

    if not re.search("gnu|glibc", stdout, re.IGNORECASE):
        return None

    version_str = re.match(r".+\(.+\) (.+)", stdout)
    if not version_str:
        return None
    try:
        return spack.spec.Spec(f"glibc@={version_str.group(1)}")
    except Exception:
        return None
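A worked example of the version regex above, applied to the banner a typical Ubuntu `ldd --version` prints (the exact banner text varies by distro):

    m = re.match(r".+\(.+\) (.+)", "ldd (Ubuntu GLIBC 2.31-0ubuntu9) 2.31")
    assert m and m.group(1) == "2.31"  # becomes Spec("glibc@=2.31")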
def libc_from_dynamic_linker(dynamic_linker: str) -> Optional["spack.spec.Spec"]:
|
||||
if not os.path.exists(dynamic_linker):
|
||||
return None
|
||||
|
||||
# The dynamic linker is usually installed in the same /lib(64)?/ld-*.so path across all
|
||||
# distros. The rest of libc is elsewhere, e.g. /usr. Typically the dynamic linker is then
|
||||
# a symlink into /usr/lib, which we use to for determining the actual install prefix of
|
||||
# libc.
|
||||
realpath = os.path.realpath(dynamic_linker)
|
||||
|
||||
prefix = os.path.dirname(realpath)
|
||||
# Remove the multiarch suffix if it exists
|
||||
if os.path.basename(prefix) not in ("lib", "lib64"):
|
||||
prefix = os.path.dirname(prefix)
|
||||
|
||||
# Non-standard install layout -- just bail.
|
||||
if os.path.basename(prefix) not in ("lib", "lib64"):
|
||||
return None
|
||||
|
||||
prefix = os.path.dirname(prefix)
|
||||
|
||||
# Now try to figure out if glibc or musl, which is the only ones we support.
|
||||
# In recent glibc we can simply execute the dynamic loader. In musl that's always the case.
|
||||
try:
|
||||
result = run([dynamic_linker, "--version"], stdout=PIPE, stderr=PIPE, check=False)
|
||||
stdout = result.stdout.decode("utf-8")
|
||||
stderr = result.stderr.decode("utf-8")
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
# musl prints to stderr
|
||||
if stderr.startswith("musl libc"):
|
||||
version_str = re.search(r"^Version (.+)$", stderr, re.MULTILINE)
|
||||
if not version_str:
|
||||
return None
|
||||
try:
|
||||
spec = spack.spec.Spec(f"musl@={version_str.group(1)}")
|
||||
spec.external_path = prefix
|
||||
return spec
|
||||
except Exception:
|
||||
return None
|
||||
elif re.search("gnu|glibc", stdout, re.IGNORECASE):
|
||||
# output is like "ld.so (...) stable release version 2.33." write a regex for it
|
||||
match = re.search(r"version (\d+\.\d+(?:\.\d+)?)", stdout)
|
||||
if not match:
|
||||
return None
|
||||
try:
|
||||
version = match.group(1)
|
||||
spec = spack.spec.Spec(f"glibc@={version}")
|
||||
spec.external_path = prefix
|
||||
return spec
|
||||
except Exception:
|
||||
return None
|
||||
else:
|
||||
# Could not get the version by running the dynamic linker directly. Instead locate `ldd`
|
||||
# relative to the dynamic linker.
|
||||
ldd = os.path.join(prefix, "bin", "ldd")
|
||||
if not os.path.exists(ldd):
|
||||
# If `/lib64/ld.so` was not a symlink to `/usr/lib/ld.so` we can try to use /usr as
|
||||
# prefix. This is the case on ubuntu 18.04 where /lib != /usr/lib.
|
||||
if prefix != "/":
|
||||
return None
|
||||
prefix = "/usr"
|
||||
ldd = os.path.join(prefix, "bin", "ldd")
|
||||
if not os.path.exists(ldd):
|
||||
return None
|
||||
maybe_spec = _libc_from_ldd(ldd)
|
||||
if not maybe_spec:
|
||||
return None
|
||||
maybe_spec.external_path = prefix
|
||||
return maybe_spec
|
||||
|
||||
|
||||
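To make the prefix logic above concrete, a hedged usage sketch; the linker path and results are illustrative of a typical glibc distro:

    spec = libc_from_dynamic_linker("/lib64/ld-linux-x86-64.so.2")
    if spec is not None:
        print(spec)                # e.g. glibc@=2.31
        print(spec.external_path)  # e.g. /usr, after stripping the lib/lib64 components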
def libc_from_current_python_process() -> Optional["spack.spec.Spec"]:
    if not sys.executable:
        return None

    dynamic_linker = spack.util.elf.pt_interp(sys.executable)

    if not dynamic_linker:
        return None

    return libc_from_dynamic_linker(dynamic_linker)


def startfile_prefix(prefix: str, compatible_with: str = sys.executable) -> Optional[str]:
    # Search for crt1.o at max depth 2, compatible with the ELF file provided in compatible_with.
    # This is useful for finding external libc startfiles on a multiarch system.
    try:
        compat = spack.util.elf.get_elf_compat(compatible_with)
        accept = lambda path: spack.util.elf.get_elf_compat(path) == compat
    except Exception:
        accept = lambda path: True

    queue = [(0, prefix)]
    while queue:
        depth, path = queue.pop()
        try:
            iterator = os.scandir(path)
        except OSError:
            continue
        with iterator:
            for entry in iterator:
                try:
                    if entry.is_dir(follow_symlinks=True):
                        if depth < 2:
                            queue.append((depth + 1, entry.path))
                    elif entry.name == "crt1.o" and accept(entry.path):
                        return path
                except Exception:
                    continue
    return None
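A usage sketch for the search above (assuming a multiarch layout; the result is distro-dependent):

    # scans /usr at most two directories deep for a crt1.o matching the running interpreter
    crt_dir = startfile_prefix("/usr")  # e.g. "/usr/lib/x86_64-linux-gnu", or None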
def parse_dynamic_linker(output: str):
    """Parse -dynamic-linker /path/to/ld.so from compiler output"""
    for line in reversed(output.splitlines()):
        if "-dynamic-linker" not in line:
            continue
        args = shlex.split(line)

        for idx in reversed(range(1, len(args))):
            arg = args[idx]
            if arg == "-dynamic-linker" or arg == "--dynamic-linker":
                return args[idx + 1]
            elif arg.startswith("--dynamic-linker=") or arg.startswith("-dynamic-linker="):
                return arg.split("=", 1)[1]
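For instance, the parser recovers the interpreter path from a trimmed, hypothetical `gcc -v` link line:

    output = "collect2 ... -dynamic-linker /lib64/ld-linux-x86-64.so.2 -o a.out main.o"
    assert parse_dynamic_linker(output) == "/lib64/ld-linux-x86-64.so.2"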
def libc_include_dir_from_startfile_prefix(
    libc_prefix: str, startfile_prefix: str
) -> Optional[str]:
    """Heuristic to determine the glibc include directory from the startfile prefix. Replaces
    $libc_prefix/lib*/<multiarch> with $libc_prefix/include/<multiarch>. This function does not
    check if the include directory actually exists or is correct."""
    parts = os.path.relpath(startfile_prefix, libc_prefix).split(os.path.sep)
    if parts[0] not in ("lib", "lib64", "libx32", "lib32"):
        return None
    parts[0] = "include"
    return os.path.join(libc_prefix, *parts)
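This heuristic is exactly what the unit test shown earlier exercises; for the multiarch Ubuntu layout:

    assert (
        libc_include_dir_from_startfile_prefix("/usr", "/usr/lib/x86_64-linux-gnu")
        == "/usr/include/x86_64-linux-gnu"
    )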
@@ -707,6 +707,7 @@ def _spider(url: urllib.parse.ParseResult, collect_nested: bool, _visited: Set[s
    raw_link = metadata_parser.fragments.pop()
    abs_link = url_util.join(response_url, raw_link.strip(), resolve_href=True)

    fragment_response_url = None
    try:
        # This seems to be text/html, though text/fragment+html is also used
        fragment_response_url, _, fragment_response = read_from_url(abs_link, "text/html")

@@ -638,6 +638,9 @@ def copy(self):
        return clone

    def __str__(self):
        if not self:
            return ""

        # print keys in order
        sorted_keys = sorted(self.keys())
@@ -146,13 +146,11 @@ def from_string(string: str):

    @staticmethod
    def typemin():
        return StandardVersion("", ((), (ALPHA,)), ("",))
        return _STANDARD_VERSION_TYPEMIN

    @staticmethod
    def typemax():
        return StandardVersion(
            "infinity", ((VersionStrComponent(len(infinity_versions)),), (FINAL,)), ("",)
        )
        return _STANDARD_VERSION_TYPEMAX

    def __bool__(self):
        return True
@@ -390,6 +388,13 @@ def up_to(self, index):
        return self[:index]


_STANDARD_VERSION_TYPEMIN = StandardVersion("", ((), (ALPHA,)), ("",))

_STANDARD_VERSION_TYPEMAX = StandardVersion(
    "infinity", ((VersionStrComponent(len(infinity_versions)),), (FINAL,)), ("",)
)
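The refactor above memoizes the sentinel versions as module-level constants, so repeated calls no longer construct new objects; a minimal check of that property:

    assert StandardVersion.typemin() is _STANDARD_VERSION_TYPEMIN
    assert StandardVersion.typemax() is _STANDARD_VERSION_TYPEMAX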
class GitVersion(ConcreteVersion):
    """Class to represent versions interpreted from git refs.

@@ -1019,6 +1024,9 @@ def __hash__(self):
        return hash(tuple(self.versions))

    def __str__(self):
        if not self.versions:
            return ""

        return ",".join(
            f"={v}" if isinstance(v, StandardVersion) else str(v) for v in self.versions
        )
@@ -1127,7 +1135,9 @@ def _prev_version(v: StandardVersion) -> StandardVersion:
    components[1::2] = separators[: len(release)]
    if prerelease_type != FINAL:
        components.extend((PRERELEASE_TO_STRING[prerelease_type], *prerelease[1:]))
    return StandardVersion("".join(str(c) for c in components), (release, prerelease), separators)

    # this is only used for comparison functions, so don't bother making a string
    return StandardVersion(None, (release, prerelease), separators)


def Version(string: Union[str, int]) -> Union[GitVersion, StandardVersion]:
@@ -12,7 +12,7 @@ markers =
    requires_executables: tests that require certain executables in PATH to run
    nomockstage: use a stage area specifically created for this test, instead of relying on a common mock stage
    enable_compiler_verification: enable compiler verification within unit tests
    enable_compiler_link_paths: verifies compiler link paths within unit tests
    enable_compiler_execution: enable compiler execution to detect link paths and libc
    disable_clean_stage_check: avoid failing tests if there are leftover files in the stage area
    only_clingo: mark unit tests that run only with clingo
    only_original: mark unit tests that are specific to the original concretizer
@@ -761,32 +761,9 @@ ml-linux-x86_64-cuda-build:
    - artifacts: True
      job: ml-linux-x86_64-cuda-generate

########################################
# Machine Learning - Linux x86_64 (ROCm)
########################################
.ml-linux-x86_64-rocm:
  extends: [ ".linux_x86_64_v3" ]
  variables:
    SPACK_CI_STACK_NAME: ml-linux-x86_64-rocm

ml-linux-x86_64-rocm-generate:
  extends: [ ".generate-x86_64", .ml-linux-x86_64-rocm, ".tags-x86_64_v4" ]
  image: ghcr.io/spack/linux-ubuntu22.04-x86_64_v2:v2024-01-29

ml-linux-x86_64-rocm-build:
  extends: [ ".build", ".ml-linux-x86_64-rocm" ]
  trigger:
    include:
      - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
        job: ml-linux-x86_64-rocm-generate
    strategy: depend
  needs:
    - artifacts: True
      job: ml-linux-x86_64-rocm-generate

########################################
#########################################
# Machine Learning - Darwin aarch64 (MPS)
########################################
#########################################
.ml-darwin-aarch64-mps:
  extends: [".darwin_aarch64"]
  variables:
@@ -13,7 +13,7 @@ ci:
    before_script-:
    - - cat /proc/loadavg || true
      - cat /proc/meminfo | grep 'MemTotal\|MemFree' || true
    - - spack list --count # ensure that spack's cache is populated
    - - time spack list --count # ensure that spack's cache is populated
    - - spack env activate --without-view ${SPACK_CONCRETE_ENV_DIR}
      - spack compiler list
      - if [ -n "$SPACK_BUILD_JOBS" ]; then spack config add "config:build_jobs:$SPACK_BUILD_JOBS"; fi
@@ -29,7 +29,7 @@ ci:
    after_script:
    - - cat /proc/loadavg || true
      - cat /proc/meminfo | grep 'MemTotal\|MemFree' || true
    - - time python ${CI_PROJECT_DIR}/share/spack/gitlab/cloud_pipelines/scripts/common/aggregate_package_logs.spack.py
    - - time ./bin/spack python ${CI_PROJECT_DIR}/share/spack/gitlab/cloud_pipelines/scripts/common/aggregate_package_logs.spack.py
        --prefix /home/software/spack:${CI_PROJECT_DIR}/opt/spack
        --log install_times.json
        ${SPACK_ARTIFACTS_ROOT}/user_data/install_times.json || true

@@ -71,7 +71,6 @@ ci:

  - match:
    - dealii
    - mxnet
    - rocblas
    build-job:
      tags: [ "spack", "huge" ]

@@ -78,12 +78,14 @@ spack:
  - drishti
  - dxt-explorer
  - dyninst
  - e4s-cl
  - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc +visit +vtkm +zfp # adios2~cuda, ascent~cuda, darshan-runtime, darshan-util, faodel, hdf5, libcatalyst, parallel-netcdf, paraview~cuda, py-cinemasci, sz, unifyfs, veloc, visit, vtk-m, zfp
  - exaworks
  - flecsi
  - flit
  - flux-core
  - fortrilinos
  - fpm
  - gasnet
  - ginkgo
  - globalarrays
@@ -120,6 +122,7 @@ spack:
  - nccmp
  - nco
  - nekbone +mpi
  - netcdf-fortran
  - netlib-scalapack
  - nrm
  - nvhpc

@@ -34,9 +34,6 @@ spack:
  - py-keras-preprocessing
  - py-keras2onnx

  # MXNet not supported on darwin aarch64 yet
  # - mxnet

  # PyTorch
  - py-botorch
  - py-efficientnet-pytorch
@@ -84,8 +81,6 @@ spack:

  # XGBoost
  - py-xgboost
  # - r-xgboost
  - xgboost

ci:
  pipeline-gen:

@@ -28,9 +28,6 @@ spack:
  - py-keras-preprocessing
  - py-keras2onnx

  # MXNet
  - mxnet

  # PyTorch
  - py-botorch
  - py-efficientnet-pytorch
@@ -77,8 +74,6 @@ spack:

  # XGBoost
  - py-xgboost
  # - r-xgboost
  - xgboost

ci:
  pipeline-gen:

@@ -32,9 +32,6 @@ spack:
  - py-keras-preprocessing
  - py-keras2onnx

  # MXNet
  - mxnet

  # PyTorch
  - py-botorch
  - py-efficientnet-pytorch
@@ -81,8 +78,6 @@ spack:

  # XGBoost
  - py-xgboost
  # - r-xgboost
  - xgboost

ci:
  pipeline-gen:
Some files were not shown because too many files have changed in this diff.