Compare commits
208 Commits
psakiev/de ... hs/fix/cma
SHA1
----
25c74506a3
e37e53cfe8
cf31d20d4c
b74db341c8
e88a3f6f85
9bd7483e73
04c76fab63
ecbf9fcacf
69fb594699
d28614151f
f1d6af6c94
192821f361
18790ca397
c22d77a38e
d82bdb3bf7
a042bdfe0b
60e3e645e8
51785437bc
2e8db0815d
8a6428746f
6b9c099af8
30814fb4e0
3194be2e92
41be2f5899
02af41ebb3
9d33c89030
51ab7bad3b
0b094f2473
cd306d0bc6
fdb9cf2412
a546441d2e
141cdb6810
f2ab74efe5
38b838e405
c037188b59
0835a3c5f2
38a2f9c2f2
eecd4afe58
83624551e0
741652caa1
8e914308f0
3c220d0989
8094fa1e2f
5c67051980
c01fb9a6d2
bf12bb57e7
406c73ae11
3f50ccfcdd
9883a2144d
94815d2227
a15563f890
ac2ede8d2f
b256a7c50d
21e10d6d98
ed39967848
eda0c6888e
66055f903c
a1c57d86c3
9da8dcae97
c93f223a73
f1faf31735
8957ef0df5
347ec87fc5
cd8c46e54e
75b03bc12f
58511a3352
325873a4c7
9156e4be04
12d3abc736
4208aa6291
0bad754e23
cde2620f41
a35aa038b0
150416919e
281c274e0b
16e130ece1
7586303fba
6501880fbf
c76098038c
124b616b27
1148c8f195
c57452dd08
a7e57c9a14
85d83f9c26
39a081d7fd
71b65bb424
3dcbd118df
5dacb774f6
cb3d6549c9
559c2f1eb9
ed1dbea77b
6ebafe4631
7f0bb7147d
f41b38e93d
5fd12b7bea
fe746bdebb
453af4b9f7
29cf1559cc
a9b3e1670b
4f9aa6004b
aa2c18e4df
0ff3e86315
df208c1095
853f70edc8
50970f866e
8821300985
adc8e1d996
1e0aac6ac3
99e2313d81
22690a7576
5325cfe865
5333925dd7
2db99e1ff6
68aa712a3e
2e71bc640c
661f3621a7
f182032337
066666b7b1
73316c3e28
c8e4ae08da
44225caade
8d325d3e30
d0fd112006
50f43ca71d
2546fb6afa
10f6863d91
63ea528606
89d2b9553d
278326b4d9
43c1a5e0ec
8feb506b3a
627544191a
cf672ea8af
2c4ac02adf
7f76490b31
46e4c1fd30
85c5533e62
c47cafd11a
8e33cc158b
f07173e5ee
118f5d2683
8fb2abc3cd
3bcb8a9236
a6fdd7608f
1ffd7125a6
d1166fd316
b8eba1c677
e3c0515076
97406f241c
e1dfbbf611
52147348c7
aeb0ab6acf
6cd26b7603
1c75d07f05
15b3ff2a0a
e9f94d9bf2
299324c7ca
dfab174f31
a86953fcb1
5f262eb5d3
00f179ee6d
da4f7c2952
fdedb6f95d
067fefc46a
42c9961bbe
fe2bf4c0f9
4d3b85c4d4
f05cbfbf44
448049ccfc
e56057fd79
26d80e7bc5
60eb0e9c80
7443a3b572
a5ba4f8d91
6ef0f495a9
e91b8c291a
6662046aca
db83c62fb1
d4adfda385
e8a8e2d98b
55c770c556
33a796801c
b90ac6441c
68b69aa9e3
ac0ed2c4cc
66a93b5433
b7993317ea
66622ec4d0
9b2cd1b208
9888683a21
fb46c7a72d
c0196cde39
d091172d67
ab51369087
1cea82b629
2abb711337
6f948eb847
93bf0634f3
badb3cedcd
be918817d6
41d9f687f6
9642b04513
bf16f0bf74
ad518d975c
a76e3f2030
1809b81e1d
a02b40b670
6d8fdbcf82
11 .github/workflows/build-containers.yml (vendored)
@@ -57,7 +57,13 @@ jobs:
      - name: Checkout
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-     - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
+     - name: Determine latest release tag
+       id: latest
+       run: |
+         git fetch --quiet --tags
+         echo "tag=$(git tag --list --sort=-v:refname | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | head -n 1)" | tee -a $GITHUB_OUTPUT
+     - uses: docker/metadata-action@369eb591f429131d6889c46b94e711f089e6ca96
        id: docker_meta
        with:
          images: |
@@ -71,6 +77,7 @@ jobs:
            type=semver,pattern={{major}}
            type=ref,event=branch
            type=ref,event=pr
+           type=raw,value=latest,enable=${{ github.ref == format('refs/tags/{0}', steps.latest.outputs.tag) }}

      - name: Generate the Dockerfile
        env:
@@ -113,7 +120,7 @@ jobs:
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-       uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75
+       uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355
        with:
          context: dockerfiles/${{ matrix.dockerfile[0] }}
          platforms: ${{ matrix.dockerfile[1] }}
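
The ``Determine latest release tag`` step above feeds ``steps.latest.outputs.tag``
into the ``type=raw,value=latest`` rule, so the ``latest`` image tag is applied
only when the pushed ref is the newest ``vX.Y.Z`` release tag. A minimal sketch
of that selection logic in Python (the tag list is hypothetical):

    import re

    tags = ["v0.21.0", "v0.22.1", "v0.23.0", "v0.23.0.dev0"]  # hypothetical
    semver = re.compile(r"^v(\d+)\.(\d+)\.(\d+)$")
    releases = [t for t in tags if semver.match(t)]
    # highest version first, mirroring `git tag --sort=-v:refname | head -n 1`
    latest = max(releases, key=lambda t: tuple(int(g) for g in semver.match(t).groups()))
    print(latest)  # v0.23.0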
2 .github/workflows/coverage.yml (vendored)
@@ -29,6 +29,6 @@ jobs:
      - run: coverage xml

      - name: "Upload coverage report to CodeCov"
-       uses: codecov/codecov-action@b9fd7d16f6d7d1b5d2bec1a2887e65ceed900238
+       uses: codecov/codecov-action@05f5a9cfad807516dbbef9929c4a42df3eb78766
        with:
          verbose: true
@@ -70,7 +70,7 @@ Tutorial
----------------

We maintain a
-[**hands-on tutorial**](https://spack.readthedocs.io/en/latest/tutorial.html).
+[**hands-on tutorial**](https://spack-tutorial.readthedocs.io/).
It covers basic to advanced usage, packaging, developer features, and large HPC
deployments. You can do all of the exercises on your own laptop using a
Docker container.
@@ -39,7 +39,8 @@ concretizer:
  # Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
  duplicates:
    # "none": allows a single node for any package in the DAG.
-   # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
+   # "minimal": allows the duplication of 'build-tools' nodes only
+   # (e.g. py-setuptools, cmake etc.)
    # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
    strategy: minimal
  # Option to specify compatibility between operating systems for reuse of compilers and packages
@@ -47,3 +48,18 @@ concretizer:
  # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
  # requires two entries i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
  os_compatible: {}
+
+ # Option to specify whether to support splicing. Splicing allows for
+ # the relinking of concrete package dependencies in order to better
+ # reuse already built packages with ABI compatible dependencies
+ splice:
+   explicit: []
+   automatic: false
+ # Maximum time, in seconds, allowed for the 'solve' phase. If set to 0, there is no time limit.
+ timeout: 0
+ # If set to true, exceeding the timeout will always result in a concretization error. If false,
+ # the best (suboptimal) model computed before the timeout is used.
+ #
+ # Setting this to false yields unreproducible results, so we advise to use that value only
+ # for debugging purposes (e.g. check which constraints can help Spack concretize faster).
+ error_on_timeout: true
@@ -237,3 +237,35 @@ is optional -- by default, splices will be transitive.
 ``mpich/abcdef`` instead of ``mvapich2`` as the MPI provider. Spack
 will warn the user in this case, but will not fail the
 concretization.
+
+.. _automatic_splicing:
+
+^^^^^^^^^^^^^^^^^^
+Automatic Splicing
+^^^^^^^^^^^^^^^^^^
+
+The Spack solver can be configured to do automatic splicing for
+ABI-compatible packages. Automatic splices are enabled in the concretizer
+config section
+
+.. code-block:: yaml
+
+   concretizer:
+     splice:
+       automatic: True
+
+Packages can include ABI-compatibility information using the
+``can_splice`` directive. See :ref:`the packaging
+guide<abi_compatibility>` for instructions on specifying ABI
+compatibility using the ``can_splice`` directive.
+
+.. note::
+
+   The ``can_splice`` directive is experimental and may be changed in
+   future versions.
+
+When automatic splicing is enabled, the concretizer will combine any
+number of ABI-compatible specs if possible to reuse installed packages
+and packages available from binary caches. The end result of these
+specs is equivalent to a series of transitive/intransitive splices,
+but the series may be non-obvious.
@@ -210,7 +210,7 @@ def setup(sphinx):
     # Spack classes that are private and we don't want to expose
     ("py:class", "spack.provider_index._IndexBase"),
     ("py:class", "spack.repo._PrependFileLoader"),
-    ("py:class", "spack.build_systems._checks.BaseBuilder"),
+    ("py:class", "spack.build_systems._checks.BuilderWithDefaults"),
     # Spack classes that intersphinx is unable to resolve
     ("py:class", "spack.version.StandardVersion"),
     ("py:class", "spack.spec.DependencySpec"),
@@ -1042,7 +1042,7 @@ file snippet we define a view named ``mpis``, rooted at
``/path/to/view`` in which all projections use the package name,
version, and compiler name to determine the path for a given
package. This view selects all packages that depend on MPI, and
-excludes those built with the PGI compiler at version 18.5.
+excludes those built with the GCC compiler at version 18.5.
The root specs with their (transitive) link and run type dependencies
will be put in the view due to the ``link: all`` option,
and the files in the view will be symlinks to the spack install
@@ -1056,7 +1056,7 @@ directories.
      mpis:
        root: /path/to/view
        select: [^mpi]
-       exclude: ['%pgi@18.5']
+       exclude: ['%gcc@18.5']
        projections:
          all: '{name}/{version}-{compiler.name}'
        link: all
@@ -283,10 +283,6 @@ compilers`` or ``spack compiler list``:
      intel@14.0.1  intel@13.0.1  intel@12.1.2  intel@10.1
   -- clang -------------------------------------------------------
      clang@3.4  clang@3.3  clang@3.2  clang@3.1
-  -- pgi ---------------------------------------------------------
-     pgi@14.3-0  pgi@13.2-0  pgi@12.1-0   pgi@10.9-0  pgi@8.0-1
-     pgi@13.10-0 pgi@13.1-1  pgi@11.10-0  pgi@10.2-0  pgi@7.1-3
-     pgi@13.6-0  pgi@12.8-0  pgi@11.1-0   pgi@9.0-4   pgi@7.0-6

Any of these compilers can be used to build Spack packages. More on
how this is done is in :ref:`sec-specs`.
@@ -806,65 +802,6 @@ flags to the ``icc`` command:
      spec: intel@15.0.24.4.9.3


-^^^
-PGI
-^^^
-
-PGI comes with two sets of compilers for C++ and Fortran,
-distinguishable by their names. "Old" compilers:
-
-.. code-block:: yaml
-
-      cc: /soft/pgi/15.10/linux86-64/15.10/bin/pgcc
-      cxx: /soft/pgi/15.10/linux86-64/15.10/bin/pgCC
-      f77: /soft/pgi/15.10/linux86-64/15.10/bin/pgf77
-      fc: /soft/pgi/15.10/linux86-64/15.10/bin/pgf90
-
-"New" compilers:
-
-.. code-block:: yaml
-
-      cc: /soft/pgi/15.10/linux86-64/15.10/bin/pgcc
-      cxx: /soft/pgi/15.10/linux86-64/15.10/bin/pgc++
-      f77: /soft/pgi/15.10/linux86-64/15.10/bin/pgfortran
-      fc: /soft/pgi/15.10/linux86-64/15.10/bin/pgfortran
-
-Older installations of PGI contains just the old compilers; whereas
-newer installations contain the old and the new. The new compiler is
-considered preferable, as some packages
-(``hdf``) will not build with the old compiler.
-
-When auto-detecting a PGI compiler, there are cases where Spack will
-find the old compilers, when you really want it to find the new
-compilers. It is best to check this ``compilers.yaml``; and if the old
-compilers are being used, change ``pgf77`` and ``pgf90`` to
-``pgfortran``.
-
-Other issues:
-
-* There are reports that some packages will not build with PGI,
-  including ``libpciaccess`` and ``openssl``. A workaround is to
-  build these packages with another compiler and then use them as
-  dependencies for PGI-build packages. For example:
-
-  .. code-block:: console
-
-     $ spack install openmpi%pgi ^libpciaccess%gcc
-
-
-* PGI requires a license to use; see :ref:`licensed-compilers` for more
-  information on installation.
-
-.. note::
-
-   It is believed the problem with HDF 4 is that everything is
-   compiled with the ``F77`` compiler, but at some point some Fortran
-   90 code slipped in there. So compilers that can handle both FORTRAN
-   77 and Fortran 90 (``gfortran``, ``pgfortran``, etc) are fine. But
-   compilers specific to one or the other (``pgf77``, ``pgf90``) won't
-   work.
-
-
^^^
NAG
^^^
@@ -1389,6 +1326,7 @@ Required:
* Microsoft Visual Studio
* Python
* Git
+* 7z

Optional:
* Intel Fortran (needed for some packages)
@@ -1454,6 +1392,13 @@ as the project providing Git support on Windows. This is additionally the recommended method
for installing Git on Windows, a link to which can be found above. Spack requires the
utilities vendored by this project.

+"""
+7zip
+"""
+
+A tool for extracting ``.xz`` files is required for extracting source tarballs. The latest 7zip
+can be located at https://sourceforge.net/projects/sevenzip/.
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Step 2: Install and setup Spack
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1267,7 +1267,7 @@ Git fetching supports the following parameters to ``version``:
   This feature requires ``git`` to be version ``2.25.0`` or later but is useful for
   large repositories that have separate portions that can be built independently.
   If paths provided are directories then all the subdirectories and associated files
   will also be cloned.

Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.

@@ -1367,8 +1367,8 @@ Submodules
   git-submodule``.

Sparse-Checkout
   You can supply ``git_sparse_paths`` at the package or version level to utilize git's
   sparse-checkout feature. This will only clone the paths that are specified in the
   ``git_sparse_paths`` attribute for the package along with the files in the top level directory.
   This feature allows you to only clone what you need from a large repository.
   Note that this is a newer feature in git and requires git ``2.25.0`` or greater.
@@ -1928,71 +1928,29 @@ to the empty list.
   String. A URL pointing to license setup instructions for the software.
   Defaults to the empty string.

-For example, let's take a look at the package for the PGI compilers.
+For example, let's take a look at the Arm Forge package.

.. code-block:: python

   # Licensing
   license_required = True
-   license_comment = "#"
-   license_files = ["license.dat"]
-   license_vars = ["PGROUPD_LICENSE_FILE", "LM_LICENSE_FILE"]
-   license_url = "http://www.pgroup.com/doc/pgiinstall.pdf"
+   license_comment = "#"
+   license_files = ["licences/Licence"]
+   license_vars = [
+       "ALLINEA_LICENSE_DIR",
+       "ALLINEA_LICENCE_DIR",
+       "ALLINEA_LICENSE_FILE",
+       "ALLINEA_LICENCE_FILE",
+   ]
+   license_url = "https://developer.arm.com/documentation/101169/latest/Use-Arm-Licence-Server"

-As you can see, PGI requires a license. Its license manager, FlexNet, uses
-the ``#`` symbol to denote a comment. It expects the license file to be
-named ``license.dat`` and to be located directly in the installation prefix.
-If you would like the installation file to be located elsewhere, simply set
-``PGROUPD_LICENSE_FILE`` or ``LM_LICENSE_FILE`` after installation. For
-further instructions on installation and licensing, see the URL provided.
+Arm Forge requires a license. Its license manager uses the ``#`` symbol to denote a comment.
+It expects the license file to be named ``License`` and to be located in a ``licenses`` directory
+in the installation prefix.

-Let's walk through a sample PGI installation to see exactly what Spack is
-and isn't capable of. Since PGI does not provide a download URL, it must
-be downloaded manually. It can either be added to a mirror or located in
-the current directory when ``spack install pgi`` is run. See :ref:`mirrors`
-for instructions on setting up a mirror.
-
-After running ``spack install pgi``, the first thing that will happen is
-Spack will create a global license file located at
-``$SPACK_ROOT/etc/spack/licenses/pgi/license.dat``. It will then open up the
-file using :ref:`your favorite editor <controlling-the-editor>`. It will look like
-this:
-
-.. code-block:: sh
-
-   # A license is required to use pgi.
-   #
-   # The recommended solution is to store your license key in this global
-   # license file. After installation, the following symlink(s) will be
-   # added to point to this file (relative to the installation prefix):
-   #
-   #   license.dat
-   #
-   # Alternatively, use one of the following environment variable(s):
-   #
-   #   PGROUPD_LICENSE_FILE
-   #   LM_LICENSE_FILE
-   #
-   # If you choose to store your license in a non-standard location, you may
-   # set one of these variable(s) to the full pathname to the license file, or
-   # port@host if you store your license keys on a dedicated license server.
-   # You will likely want to set this variable in a module file so that it
-   # gets loaded every time someone tries to use pgi.
-   #
-   # For further information on how to acquire a license, please refer to:
-   #
-   #   http://www.pgroup.com/doc/pgiinstall.pdf
-   #
-   # You may enter your license below.
-
-You can add your license directly to this file, or tell FlexNet to use a
-license stored on a separate license server. Here is an example that
-points to a license server called licman1:
-
-.. code-block:: none
-
-   SERVER licman1.mcs.anl.gov 00163eb7fba5 27200
-   USE_SERVER
+If you would like the installation file to be located elsewhere, simply set ``ALLINEA_LICENSE_DIR`` or
+one of the other license variables after installation. For further instructions on installation and
+licensing, see the URL provided.

If your package requires the license to install, you can reference the
location of this global license using ``self.global_license_file``.
@@ -2392,7 +2350,7 @@ by the ``--jobs`` option:

   .. code-block:: python
      :emphasize-lines: 7, 11
      :linenos:

      class Xios(Package):
         ...
         def install(self, spec, prefix):
@@ -2967,9 +2925,9 @@ make sense during the build phase may not be needed at runtime, and vice versa.
it makes sense to let a dependency set the environment variables for its dependents. To allow all
this, Spack provides four different methods that can be overridden in a package:

-1. :meth:`setup_build_environment <spack.builder.Builder.setup_build_environment>`
+1. :meth:`setup_build_environment <spack.builder.BaseBuilder.setup_build_environment>`
2. :meth:`setup_run_environment <spack.package_base.PackageBase.setup_run_environment>`
-3. :meth:`setup_dependent_build_environment <spack.builder.Builder.setup_dependent_build_environment>`
+3. :meth:`setup_dependent_build_environment <spack.builder.BaseBuilder.setup_dependent_build_environment>`
4. :meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
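
A minimal sketch (hypothetical ``MyLib`` package) of overriding one of these
methods; ``env`` is the environment-modifications object Spack passes in, and
``env.set``/``env.prepend_path`` are its standard mutators:

    class MyLib(Package):
        def setup_run_environment(self, env):
            # expose the installed tools and library root at run time
            env.set("MYLIB_ROOT", self.prefix)
            env.prepend_path("PATH", self.prefix.bin)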

The Qt package, for instance, uses this call:
@@ -5420,7 +5378,7 @@ by build recipes. Examples of checking :ref:`variant settings <variants>` and
determine whether it needs to also set up build dependencies (see
:ref:`test-build-tests`).

The ``MyPackage`` package below provides two basic test examples:
``test_example`` and ``test_example2``. The first runs the installed
``example`` and ensures its output contains an expected string. The second
runs ``example2`` without checking output so is only concerned with confirming
@@ -5737,7 +5695,7 @@ subdirectory of the installation prefix. They are automatically copied to
the appropriate relative paths under the test stage directory prior to
executing stand-alone tests.

.. tip::

   *Perform test-related conversions once when copying files.*
@@ -7113,6 +7071,46 @@ might write:
      CXXFLAGS += -I$DWARF_PREFIX/include
      CXXFLAGS += -L$DWARF_PREFIX/lib

+.. _abi_compatibility:
+
+----------------------------
+Specifying ABI Compatibility
+----------------------------
+
+Packages can include ABI-compatibility information using the
+``can_splice`` directive. For example, if ``Foo`` version 1.1 can
+always replace version 1.0, then the package could have:
+
+.. code-block:: python
+
+   can_splice("foo@1.0", when="@1.1")
+
+For virtual packages, packages can also specify ABI-compatibility with
+other packages providing the same virtual. For example, ``zlib-ng``
+could specify:
+
+.. code-block:: python
+
+   can_splice("zlib@1.3.1", when="@2.2+compat")
+
+Some packages have ABI-compatibility that is dependent on matching
+variant values, either for all variants or for some set of
+ABI-relevant variants. In those cases, it is not necessary to specify
+the full combinatorial explosion. The ``match_variants`` keyword can
+cover all single-value variants.
+
+.. code-block:: python
+
+   can_splice("foo@1.1", when="@1.2", match_variants=["bar"])  # any value for bar as long as they're the same
+   can_splice("foo@1.2", when="@1.3", match_variants="*")  # any variant values if all single-value variants match
+
+The concretizer will use ABI compatibility to determine automatic
+splices when :ref:`automatic splicing<automatic_splicing>` is enabled.
+
+.. note::
+
+   The ``can_splice`` directive is experimental, and may be replaced
+   by a higher-level interface in future versions of Spack.
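
A minimal sketch of where these pieces could sit in a hypothetical ``foo``
package recipe, pairing the directive with the concretizer setting shown
earlier; the versions and the variant name are illustrative:

.. code-block:: python

   class Foo(Package):
       version("1.3")
       version("1.2")
       variant("bar", default=True, description="An ABI-relevant flag")

       # 1.3 may be spliced in for an installed 1.2, provided the
       # single-valued variants of the two specs match
       can_splice("foo@1.2", when="@1.3", match_variants="*")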

.. _package_class_structure:
@@ -1,7 +1,7 @@
 sphinx==8.1.3
 sphinxcontrib-programoutput==0.17
 sphinx_design==0.6.1
-sphinx-rtd-theme==3.0.1
+sphinx-rtd-theme==3.0.2
 python-levenshtein==0.26.1
 docutils==0.21.2
 pygments==2.18.0
|
@@ -24,6 +24,7 @@
|
||||
Callable,
|
||||
Deque,
|
||||
Dict,
|
||||
Generator,
|
||||
Iterable,
|
||||
List,
|
||||
Match,
|
||||
@@ -2838,6 +2839,25 @@ def temporary_dir(
        remove_directory_contents(tmp_dir)


+@contextmanager
+def edit_in_place_through_temporary_file(file_path: str) -> Generator[str, None, None]:
+    """Context manager for modifying ``file_path`` in place, preserving its inode and hardlinks,
+    for functions or external tools that do not support in-place editing. Notice that this function
+    is unsafe in that it works with paths instead of file descriptors, but this is by design,
+    since we assume the call site will create a new inode at the same path."""
+    tmp_fd, tmp_path = tempfile.mkstemp(
+        dir=os.path.dirname(file_path), prefix=f"{os.path.basename(file_path)}."
+    )
+    # windows cannot replace a file with open fds, so close since the call site needs to replace.
+    os.close(tmp_fd)
+    try:
+        shutil.copyfile(file_path, tmp_path, follow_symlinks=True)
+        yield tmp_path
+        shutil.copyfile(tmp_path, file_path, follow_symlinks=True)
+    finally:
+        os.unlink(tmp_path)
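
A minimal usage sketch, assuming a hypothetical ``rewrite`` helper that (like
``codesign`` below) replaces the file it edits with a new inode; the edit runs
on a sibling copy and the bytes are copied back, so the original path keeps its
inode and hardlinks:

    from llnl.util.filesystem import edit_in_place_through_temporary_file

    with edit_in_place_through_temporary_file("/path/to/binary") as tmp:
        rewrite(tmp)  # hypothetical in-place editor that re-creates its input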


def filesummary(path, print_bytes=16) -> Tuple[int, bytes]:
    """Create a small summary of the given file. Does not error
    when file does not exist.
@@ -11,7 +11,7 @@
import spack.util.git

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.23.0.dev0"
+__version__ = "0.24.0.dev0"
spack_version = __version__
@@ -571,8 +571,13 @@ def _search_for_deprecated_package_methods(pkgs, error_cls):
@package_properties
def _ensure_all_package_names_are_lowercase(pkgs, error_cls):
    """Ensure package names are lowercase and consistent"""
+   reserved_names = ("all",)
    badname_regex, errors = re.compile(r"[_A-Z]"), []
    for pkg_name in pkgs:
+       if pkg_name in reserved_names:
+           error_msg = f"The name '{pkg_name}' is reserved, and cannot be used for packages"
+           errors.append(error_cls(error_msg, []))
+
        if badname_regex.search(pkg_name):
            error_msg = f"Package name '{pkg_name}' should be lowercase and must not contain '_'"
            errors.append(error_cls(error_msg, []))
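
For illustration, the name check above reduces to one regex plus a
reserved-name tuple; a quick sketch of what each rule flags:

    import re

    badname_regex = re.compile(r"[_A-Z]")
    assert badname_regex.search("py_numpy")         # flagged: contains "_"
    assert badname_regex.search("OpenMPI")          # flagged: uppercase letters
    assert badname_regex.search("openmpi") is None  # fine
    # "all" passes the regex but is caught by the reserved_names check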
@@ -87,6 +87,8 @@
from spack.stage import Stage
from spack.util.executable import which

+from .enums import InstallRecordStatus
+
BUILD_CACHE_RELATIVE_PATH = "build_cache"
BUILD_CACHE_KEYS_RELATIVE_PATH = "_pgp"

@@ -252,7 +254,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):

        spec_list = [
            s
-           for s in db.query_local(installed=any)
+           for s in db.query_local(installed=InstallRecordStatus.ANY)
            if s.external or db.query_local_by_spec_hash(s.dag_hash()).in_buildcache
        ]
@@ -2332,7 +2334,9 @@ def is_backup_file(file):
    if not codesign:
        return
    for binary in changed_files:
-       codesign("-fs-", binary)
+       # preserve the original inode by running codesign on a copy
+       with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
+           codesign("-fs-", tmp_binary)

    # If we are installing back to the same location
    # relocate the sbang location if the spack directory changed
@@ -56,7 +56,6 @@
from llnl.util.symlink import symlink
from llnl.util.tty.color import cescape, colorize

-import spack.build_systems._checks
import spack.build_systems.cmake
import spack.build_systems.meson
import spack.build_systems.python
@@ -883,6 +882,9 @@ def __init__(self, *roots: spack.spec.Spec, context: Context):
        elif context == Context.RUN:
            self.root_depflag = dt.RUN | dt.LINK

+   def accept(self, item):
+       return True
+
    def neighbors(self, item):
        spec = item.edge.spec
        if spec.dag_hash() in self.root_hashes:
@@ -920,19 +922,19 @@ def effective_deptypes(
    a flag specifying in what way they do so. The list is ordered topologically
    from root to leaf, meaning that environment modifications should be applied
    in reverse so that dependents override dependencies, not the other way around."""
-   visitor = traverse.TopoVisitor(
-       EnvironmentVisitor(*specs, context=context),
-       key=lambda x: x.dag_hash(),
+   topo_sorted_edges = traverse.traverse_topo_edges_generator(
+       traverse.with_artificial_edges(specs),
+       visitor=EnvironmentVisitor(*specs, context=context),
+       key=traverse.by_dag_hash,
        root=True,
        all_edges=True,
    )
-   traverse.traverse_depth_first_with_visitor(traverse.with_artificial_edges(specs), visitor)

    # Dictionary with "no mode" as default value, so it's easy to write modes[x] |= flag.
    use_modes = defaultdict(lambda: UseMode(0))
    nodes_with_type = []

-   for edge in visitor.edges:
+   for edge in topo_sorted_edges:
        parent, child, depflag = edge.parent, edge.spec, edge.depflag

        # Mark the starting point
@@ -1375,7 +1377,7 @@ def exitcode_msg(p):
    return child_result


-CONTEXT_BASES = (spack.package_base.PackageBase, spack.build_systems._checks.BaseBuilder)
+CONTEXT_BASES = (spack.package_base.PackageBase, spack.builder.Builder)


def get_package_context(traceback, context=3):
@@ -9,6 +9,7 @@

import spack.builder
import spack.error
+import spack.phase_callbacks
import spack.relocate
import spack.spec
import spack.store
@@ -63,7 +64,7 @@ def apply_macos_rpath_fixups(builder: spack.builder.Builder):


def ensure_build_dependencies_or_raise(
-   spec: spack.spec.Spec, dependencies: List[spack.spec.Spec], error_msg: str
+   spec: spack.spec.Spec, dependencies: List[str], error_msg: str
):
    """Ensure that some build dependencies are present in the concrete spec.

@@ -71,7 +72,7 @@ def ensure_build_dependencies_or_raise(

    Args:
        spec: concrete spec to be checked.
-       dependencies: list of abstract specs to be satisfied
+       dependencies: list of package names of required build dependencies
        error_msg: brief error message to be prepended to a longer description

    Raises:
@@ -127,8 +128,8 @@ def execute_install_time_tests(builder: spack.builder.Builder):
    builder.pkg.tester.phase_tests(builder, "install", builder.install_time_test_callbacks)


-class BaseBuilder(spack.builder.Builder):
-   """Base class for builders to register common checks"""
+class BuilderWithDefaults(spack.builder.Builder):
+   """Base class for all specific builders with common callbacks registered."""

    # Check that self.prefix is there after installation
-   spack.builder.run_after("install")(sanity_check_prefix)
+   spack.phase_callbacks.run_after("install")(sanity_check_prefix)
@@ -6,7 +6,7 @@
import os.path
import stat
import subprocess
-from typing import List
+from typing import Callable, List, Optional, Set, Tuple, Union

import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -15,6 +15,9 @@
import spack.builder
import spack.error
import spack.package_base
+import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
from spack.directives import build_system, conflicts, depends_on
from spack.multimethod import when
from spack.operating_systems.mac_os import macos_version
@@ -22,7 +25,7 @@
from spack.version import Version

from ._checks import (
-   BaseBuilder,
+   BuilderWithDefaults,
    apply_macos_rpath_fixups,
    ensure_build_dependencies_or_raise,
    execute_build_time_tests,
@@ -69,14 +72,14 @@ def flags_to_build_system_args(self, flags):
    # Legacy methods (used by too many packages to change them,
    # need to forward to the builder)
    def enable_or_disable(self, *args, **kwargs):
-       return self.builder.enable_or_disable(*args, **kwargs)
+       return spack.builder.create(self).enable_or_disable(*args, **kwargs)

    def with_or_without(self, *args, **kwargs):
-       return self.builder.with_or_without(*args, **kwargs)
+       return spack.builder.create(self).with_or_without(*args, **kwargs)


@spack.builder.builder("autotools")
-class AutotoolsBuilder(BaseBuilder):
+class AutotoolsBuilder(BuilderWithDefaults):
    """The autotools builder encodes the default way of installing software built
    with autotools. It has four phases that can be overridden, if need be:

@@ -157,7 +160,7 @@ class AutotoolsBuilder(BaseBuilder):
    install_libtool_archives = False

    @property
-   def patch_config_files(self):
+   def patch_config_files(self) -> bool:
        """Whether to update old ``config.guess`` and ``config.sub`` files
        distributed with the tarball.

@@ -177,7 +180,7 @@ def patch_config_files(self):
        )

    @property
-   def _removed_la_files_log(self):
+   def _removed_la_files_log(self) -> str:
        """File containing the list of removed libtool archives"""
        build_dir = self.build_directory
        if not os.path.isabs(self.build_directory):
@@ -185,15 +188,15 @@ def _removed_la_files_log(self):
        return os.path.join(build_dir, "removed_la_files.txt")

    @property
-   def archive_files(self):
+   def archive_files(self) -> List[str]:
        """Files to archive for packages based on autotools"""
        files = [os.path.join(self.build_directory, "config.log")]
        if not self.install_libtool_archives:
            files.append(self._removed_la_files_log)
        return files

-   @spack.builder.run_after("autoreconf")
-   def _do_patch_config_files(self):
+   @spack.phase_callbacks.run_after("autoreconf")
+   def _do_patch_config_files(self) -> None:
        """Some packages ship with older config.guess/config.sub files and need to
        have these updated when installed on a newer architecture.

@@ -294,7 +297,7 @@ def runs_ok(script_abs_path):
        and set the prefix to the directory containing the `config.guess` and
        `config.sub` files.
        """
-       raise spack.error.InstallError(msg.format(", ".join(to_be_found), self.name))
+       raise spack.error.InstallError(msg.format(", ".join(to_be_found), self.pkg.name))

        # Copy the good files over the bad ones
        for abs_path in to_be_patched:
@@ -304,8 +307,8 @@ def runs_ok(script_abs_path):
            fs.copy(substitutes[name], abs_path)
            os.chmod(abs_path, mode)

-   @spack.builder.run_before("configure")
-   def _patch_usr_bin_file(self):
+   @spack.phase_callbacks.run_before("configure")
+   def _patch_usr_bin_file(self) -> None:
        """On NixOS file is not available in /usr/bin/file. Patch configure
        scripts to use file from path."""

@@ -316,8 +319,8 @@ def _patch_usr_bin_file(self):
        with fs.keep_modification_time(*x.filenames):
            x.filter(regex="/usr/bin/file", repl="file", string=True)

-   @spack.builder.run_before("configure")
-   def _set_autotools_environment_variables(self):
+   @spack.phase_callbacks.run_before("configure")
+   def _set_autotools_environment_variables(self) -> None:
        """Many autotools builds use a version of mknod.m4 that fails when
        running as root unless FORCE_UNSAFE_CONFIGURE is set to 1.

@@ -330,8 +333,8 @@ def _set_autotools_environment_variables(self):
        """
        os.environ["FORCE_UNSAFE_CONFIGURE"] = "1"

-   @spack.builder.run_before("configure")
-   def _do_patch_libtool_configure(self):
+   @spack.phase_callbacks.run_before("configure")
+   def _do_patch_libtool_configure(self) -> None:
        """Patch bugs that propagate from libtool macros into "configure" and
        further into "libtool". Note that patches that can be fixed by patching
        "libtool" directly should be implemented in the _do_patch_libtool method
@@ -358,8 +361,8 @@ def _do_patch_libtool_configure(self):
        # Support Libtool 2.4.2 and older:
        x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')

-   @spack.builder.run_after("configure")
-   def _do_patch_libtool(self):
+   @spack.phase_callbacks.run_after("configure")
+   def _do_patch_libtool(self) -> None:
        """If configure generates a "libtool" script that does not correctly
        detect the compiler (and patch_libtool is set), patch in the correct
        values for libtool variables.
@@ -507,27 +510,64 @@ def _do_patch_libtool(self):
            )

    @property
-   def configure_directory(self):
+   def configure_directory(self) -> str:
        """Return the directory where 'configure' resides."""
        return self.pkg.stage.source_path

    @property
-   def configure_abs_path(self):
+   def configure_abs_path(self) -> str:
        # Absolute path to configure
        configure_abs_path = os.path.join(os.path.abspath(self.configure_directory), "configure")
        return configure_abs_path

    @property
-   def build_directory(self):
+   def build_directory(self) -> str:
        """Override to provide another place to build the package"""
        return self.configure_directory

-   @spack.builder.run_before("autoreconf")
-   def delete_configure_to_force_update(self):
+   @spack.phase_callbacks.run_before("autoreconf")
+   def delete_configure_to_force_update(self) -> None:
        if self.force_autoreconf:
            fs.force_remove(self.configure_abs_path)

-   def autoreconf(self, pkg, spec, prefix):
+   @property
+   def autoreconf_search_path_args(self) -> List[str]:
+       """Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
+       of build deps, skips the default path of automake, move external include
+       flags to the back, since they might pull in unrelated m4 files shadowing
+       spack dependencies."""
+       return _autoreconf_search_path_args(self.spec)
+
+   @spack.phase_callbacks.run_after("autoreconf")
+   def set_configure_or_die(self) -> None:
+       """Ensure the presence of a "configure" script, or raise. If the "configure"
+       is found, a module level attribute is set.
+
+       Raises:
+           RuntimeError: if the "configure" script is not found
+       """
+       # Check if the "configure" script is there. If not raise a RuntimeError.
+       if not os.path.exists(self.configure_abs_path):
+           msg = "configure script not found in {0}"
+           raise RuntimeError(msg.format(self.configure_directory))
+
+       # Monkey-patch the configure script in the corresponding module
+       globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
+       globals_for_pkg.configure = Executable(self.configure_abs_path)
+       globals_for_pkg.propagate_changes_to_mro()
+
+   def configure_args(self) -> List[str]:
+       """Return the list of all the arguments that must be passed to configure,
+       except ``--prefix`` which will be pre-pended to the list.
+       """
+       return []
+
+   def autoreconf(
+       self,
+       pkg: spack.package_base.PackageBase,
+       spec: spack.spec.Spec,
+       prefix: spack.util.prefix.Prefix,
+   ) -> None:
        """Not needed usually, configure should be already there"""

        # If configure exists nothing needs to be done
@@ -554,39 +594,12 @@ def autoreconf(self, pkg, spec, prefix):
        autoreconf_args += self.autoreconf_extra_args
        self.pkg.module.autoreconf(*autoreconf_args)

-   @property
-   def autoreconf_search_path_args(self):
-       """Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
-       of build deps, skips the default path of automake, move external include
-       flags to the back, since they might pull in unrelated m4 files shadowing
-       spack dependencies."""
-       return _autoreconf_search_path_args(self.spec)
-
-   @spack.builder.run_after("autoreconf")
-   def set_configure_or_die(self):
-       """Ensure the presence of a "configure" script, or raise. If the "configure"
-       is found, a module level attribute is set.
-
-       Raises:
-           RuntimeError: if the "configure" script is not found
-       """
-       # Check if the "configure" script is there. If not raise a RuntimeError.
-       if not os.path.exists(self.configure_abs_path):
-           msg = "configure script not found in {0}"
-           raise RuntimeError(msg.format(self.configure_directory))
-
-       # Monkey-patch the configure script in the corresponding module
-       globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
-       globals_for_pkg.configure = Executable(self.configure_abs_path)
-       globals_for_pkg.propagate_changes_to_mro()
-
-   def configure_args(self):
-       """Return the list of all the arguments that must be passed to configure,
-       except ``--prefix`` which will be pre-pended to the list.
-       """
-       return []
-
-   def configure(self, pkg, spec, prefix):
+   def configure(
+       self,
+       pkg: spack.package_base.PackageBase,
+       spec: spack.spec.Spec,
+       prefix: spack.util.prefix.Prefix,
+   ) -> None:
        """Run "configure", with the arguments specified by the builder and an
        appropriately set prefix.
        """
@@ -597,7 +610,12 @@ def configure(self, pkg, spec, prefix):
        with fs.working_dir(self.build_directory, create=True):
            pkg.module.configure(*options)

-   def build(self, pkg, spec, prefix):
+   def build(
+       self,
+       pkg: spack.package_base.PackageBase,
+       spec: spack.spec.Spec,
+       prefix: spack.util.prefix.Prefix,
+   ) -> None:
        """Run "make" on the build targets specified by the builder."""
        # See https://autotools.io/automake/silent.html
        params = ["V=1"]
@@ -605,41 +623,49 @@ def build(self, pkg, spec, prefix):
        with fs.working_dir(self.build_directory):
            pkg.module.make(*params)

-   def install(self, pkg, spec, prefix):
+   def install(
+       self,
+       pkg: spack.package_base.PackageBase,
+       spec: spack.spec.Spec,
+       prefix: spack.util.prefix.Prefix,
+   ) -> None:
        """Run "make" on the install targets specified by the builder."""
        with fs.working_dir(self.build_directory):
            pkg.module.make(*self.install_targets)

-   spack.builder.run_after("build")(execute_build_time_tests)
+   spack.phase_callbacks.run_after("build")(execute_build_time_tests)

-   def check(self):
+   def check(self) -> None:
        """Run "make" on the ``test`` and ``check`` targets, if found."""
        with fs.working_dir(self.build_directory):
            self.pkg._if_make_target_execute("test")
            self.pkg._if_make_target_execute("check")

    def _activate_or_not(
-       self, name, activation_word, deactivation_word, activation_value=None, variant=None
-   ):
+       self,
+       name: str,
+       activation_word: str,
+       deactivation_word: str,
+       activation_value: Optional[Union[Callable, str]] = None,
+       variant=None,
+   ) -> List[str]:
        """This function contains the current implementation details of
        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without` and
        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.enable_or_disable`.

        Args:
-           name (str): name of the option that is being activated or not
-           activation_word (str): the default activation word ('with' in the
-               case of ``with_or_without``)
-           deactivation_word (str): the default deactivation word ('without'
-               in the case of ``with_or_without``)
-           activation_value (typing.Callable): callable that accepts a single
-               value. This value is either one of the allowed values for a
-               multi-valued variant or the name of a bool-valued variant.
+           name: name of the option that is being activated or not
+           activation_word: the default activation word ('with' in the case of
+               ``with_or_without``)
+           deactivation_word: the default deactivation word ('without' in the case of
+               ``with_or_without``)
+           activation_value: callable that accepts a single value. This value is either one of the
+               allowed values for a multi-valued variant or the name of a bool-valued variant.
               Returns the parameter to be used when the value is activated.

-               The special value 'prefix' can also be assigned and will return
+               The special value "prefix" can also be assigned and will return
               ``spec[name].prefix`` as activation parameter.
-           variant (str): name of the variant that is being processed
-               (if different from option name)
+           variant: name of the variant that is being processed (if different from option name)

        Examples:

@@ -647,19 +673,19 @@ def _activate_or_not(

            .. code-block:: python

-               variant('foo', values=('x', 'y'), description='')
-               variant('bar', default=True, description='')
-               variant('ba_z', default=True, description='')
+               variant("foo", values=("x", "y"), description="")
+               variant("bar", default=True, description="")
+               variant("ba_z", default=True, description="")

            calling this function like:

            .. code-block:: python

                _activate_or_not(
-                   'foo', 'with', 'without', activation_value='prefix'
+                   "foo", "with", "without", activation_value="prefix"
                )
-               _activate_or_not('bar', 'with', 'without')
-               _activate_or_not('ba-z', 'with', 'without', variant='ba_z')
+               _activate_or_not("bar", "with", "without")
+               _activate_or_not("ba-z", "with", "without", variant="ba_z")

            will generate the following configuration options:

@@ -679,8 +705,8 @@ def _activate_or_not(
        Raises:
            KeyError: if name is not among known variants
        """
-       spec = self.pkg.spec
-       args = []
+       spec: spack.spec.Spec = self.pkg.spec
+       args: List[str] = []

        if activation_value == "prefix":
            activation_value = lambda x: spec[x].prefix
@@ -698,7 +724,7 @@ def _activate_or_not(
        # Create a list of pairs. Each pair includes a configuration
        # option and whether or not that option is activated
        vdef = self.pkg.get_variant(variant)
-       if set(vdef.values) == set((True, False)):
+       if set(vdef.values) == set((True, False)):  # type: ignore
            # BoolValuedVariant carry information about a single option.
            # Nonetheless, for uniformity of treatment we'll package them
            # in an iterable of one element.
@@ -709,14 +735,12 @@ def _activate_or_not(
            # package's build system. It excludes values which have special
            # meanings and do not correspond to features (e.g. "none")
            feature_values = getattr(vdef.values, "feature_values", None) or vdef.values
-           options = [(value, f"{variant}={value}" in spec) for value in feature_values]
+           options = [(v, f"{variant}={v}" in spec) for v in feature_values]  # type: ignore

        # For each allowed value in the list of values
        for option_value, activated in options:
            # Search for an override in the package for this value
-           override_name = "{0}_or_{1}_{2}".format(
-               activation_word, deactivation_word, option_value
-           )
+           override_name = f"{activation_word}_or_{deactivation_word}_{option_value}"
            line_generator = getattr(self, override_name, None) or getattr(
                self.pkg, override_name, None
            )
@@ -725,19 +749,24 @@ def _activate_or_not(

        def _default_generator(is_activated):
            if is_activated:
-               line = "--{0}-{1}".format(activation_word, option_value)
+               line = f"--{activation_word}-{option_value}"
                if activation_value is not None and activation_value(
                    option_value
                ):  # NOQA=ignore=E501
-                   line += "={0}".format(activation_value(option_value))
+                   line = f"{line}={activation_value(option_value)}"
                return line
-           return "--{0}-{1}".format(deactivation_word, option_value)
+           return f"--{deactivation_word}-{option_value}"

        line_generator = _default_generator
        args.append(line_generator(activated))
        return args

-   def with_or_without(self, name, activation_value=None, variant=None):
+   def with_or_without(
+       self,
+       name: str,
+       activation_value: Optional[Union[Callable, str]] = None,
+       variant: Optional[str] = None,
+   ) -> List[str]:
        """Inspects a variant and returns the arguments that activate
        or deactivate the selected feature(s) for the configure options.

@@ -752,12 +781,11 @@ def with_or_without(self, name, activation_value=None, variant=None):
        ``variant=value`` is in the spec.

        Args:
-           name (str): name of a valid multi-valued variant
-           activation_value (typing.Callable): callable that accepts a single
-               value and returns the parameter to be used leading to an entry
-               of the type ``--with-{name}={parameter}``.
+           name: name of a valid multi-valued variant
+           activation_value: callable that accepts a single value and returns the parameter to be
+               used leading to an entry of the type ``--with-{name}={parameter}``.

-               The special value 'prefix' can also be assigned and will return
+               The special value "prefix" can also be assigned and will return
               ``spec[name].prefix`` as activation parameter.

        Returns:
@@ -765,18 +793,22 @@ def with_or_without(self, name, activation_value=None, variant=None):
        """
        return self._activate_or_not(name, "with", "without", activation_value, variant)

-   def enable_or_disable(self, name, activation_value=None, variant=None):
+   def enable_or_disable(
+       self,
+       name: str,
+       activation_value: Optional[Union[Callable, str]] = None,
+       variant: Optional[str] = None,
+   ) -> List[str]:
        """Same as
        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`
        but substitute ``with`` with ``enable`` and ``without`` with ``disable``.

        Args:
-           name (str): name of a valid multi-valued variant
-           activation_value (typing.Callable): if present accepts a single value
-               and returns the parameter to be used leading to an entry of the
-               type ``--enable-{name}={parameter}``
+           name: name of a valid multi-valued variant
+           activation_value: if present accepts a single value and returns the parameter to be
+               used leading to an entry of the type ``--enable-{name}={parameter}``

-               The special value 'prefix' can also be assigned and will return
+               The special value "prefix" can also be assigned and will return
               ``spec[name].prefix`` as activation parameter.

        Returns:
@@ -784,15 +816,15 @@ def enable_or_disable(self, name, activation_value=None, variant=None):
        """
        return self._activate_or_not(name, "enable", "disable", activation_value, variant)

-   spack.builder.run_after("install")(execute_install_time_tests)
+   spack.phase_callbacks.run_after("install")(execute_install_time_tests)

-   def installcheck(self):
+   def installcheck(self) -> None:
        """Run "make" on the ``installcheck`` target, if found."""
        with fs.working_dir(self.build_directory):
            self.pkg._if_make_target_execute("installcheck")

-   @spack.builder.run_after("install")
-   def remove_libtool_archives(self):
+   @spack.phase_callbacks.run_after("install")
+   def remove_libtool_archives(self) -> None:
        """Remove all .la files in prefix sub-folders if the package sets
        ``install_libtool_archives`` to be False.
        """
@@ -814,12 +846,13 @@ def setup_build_environment(self, env):
        env.set("MACOSX_DEPLOYMENT_TARGET", "10.16")

    # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
-   spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
+   spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)


-def _autoreconf_search_path_args(spec):
-   dirs_seen = set()
-   flags_spack, flags_external = [], []
+def _autoreconf_search_path_args(spec: spack.spec.Spec) -> List[str]:
+   dirs_seen: Set[Tuple[int, int]] = set()
+   flags_spack: List[str] = []
+   flags_external: List[str] = []

    # We don't want to add an include flag for automake's default search path.
    for automake in spec.dependencies(name="automake", deptype="build"):
@@ -10,7 +10,7 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty

-import spack.builder
+import spack.phase_callbacks

from .cmake import CMakeBuilder, CMakePackage

@@ -192,7 +192,10 @@ def initconfig_mpi_entries(self):

        entries.append(cmake_cache_path("MPI_C_COMPILER", spec["mpi"].mpicc))
        entries.append(cmake_cache_path("MPI_CXX_COMPILER", spec["mpi"].mpicxx))
-       entries.append(cmake_cache_path("MPI_Fortran_COMPILER", spec["mpi"].mpifc))
+
+       # not all MPIs have Fortran wrappers
+       if hasattr(spec["mpi"], "mpifc"):
+           entries.append(cmake_cache_path("MPI_Fortran_COMPILER", spec["mpi"].mpifc))

        # Check for slurm
        using_slurm = False
@@ -332,7 +335,7 @@ def std_cmake_args(self):
        args.extend(["-C", self.cache_path])
        return args

-   @spack.builder.run_after("install")
+   @spack.phase_callbacks.run_after("install")
    def install_cmake_cache(self):
        fs.mkdirp(self.pkg.spec.prefix.share.cmake)
        fs.install(self.cache_path, self.pkg.spec.prefix.share.cmake)
@@ -7,10 +7,11 @@

 import spack.builder
 import spack.package_base
+import spack.phase_callbacks
 from spack.directives import build_system, depends_on
 from spack.multimethod import when

-from ._checks import BaseBuilder, execute_install_time_tests
+from ._checks import BuilderWithDefaults, execute_install_time_tests


 class CargoPackage(spack.package_base.PackageBase):
@@ -27,7 +28,7 @@ class CargoPackage(spack.package_base.PackageBase):


 @spack.builder.builder("cargo")
-class CargoBuilder(BaseBuilder):
+class CargoBuilder(BuilderWithDefaults):
     """The Cargo builder encodes the most common way of building software with
     a Rust ``Cargo.toml`` file. It has two phases that can be overridden, if need be:

@@ -77,7 +78,7 @@ def install(self, pkg, spec, prefix):
         with fs.working_dir(self.build_directory):
             fs.install_tree("out", prefix)

-    spack.builder.run_after("install")(execute_install_time_tests)
+    spack.phase_callbacks.run_after("install")(execute_install_time_tests)

     def check(self):
         """Run "cargo test"."""
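The other change repeated across this series is the rename of the shared base class: every build-system builder now derives from ``BuilderWithDefaults`` instead of ``BaseBuilder``. A hedged sketch of a package that would pick up ``CargoBuilder`` (crate name, URL, and checksum are made up for illustration):

```python
from spack.package import *

class MyCrate(CargoPackage):  # hypothetical package
    homepage = "https://example.com/my-crate"
    url = "https://example.com/my-crate-1.0.0.tar.gz"

    version("1.0.0", sha256="0" * 64)  # placeholder checksum
```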
@@ -9,7 +9,7 @@
 import re
 import sys
 from itertools import chain
-from typing import List, Optional, Set, Tuple
+from typing import Any, List, Optional, Tuple

 import llnl.util.filesystem as fs
 from llnl.util.lang import stable_partition
@@ -18,11 +18,15 @@
 import spack.deptypes as dt
 import spack.error
 import spack.package_base
+import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
+from spack import traverse
 from spack.directives import build_system, conflicts, depends_on, variant
 from spack.multimethod import when
 from spack.util.environment import filter_system_paths

-from ._checks import BaseBuilder, execute_build_time_tests
+from ._checks import BuilderWithDefaults, execute_build_time_tests

 # Regex to extract the primary generator from the CMake generator
 # string.
@@ -48,9 +52,9 @@ def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]
     python_executable = pkg.spec["python"].command.path
     args.extend(
         [
-            CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable),
-            CMakeBuilder.define("Python_EXECUTABLE", python_executable),
-            CMakeBuilder.define("Python3_EXECUTABLE", python_executable),
+            define("PYTHON_EXECUTABLE", python_executable),
+            define("Python_EXECUTABLE", python_executable),
+            define("Python3_EXECUTABLE", python_executable),
         ]
     )
@@ -85,7 +89,7 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
     ipo = False

     if cmake.satisfies("@3.9:"):
-        args.append(CMakeBuilder.define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))
+        args.append(define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))

     # Disable Package Registry: export(PACKAGE) may put files in the user's home directory, and
     # find_package may search there. This is not what we want.
@@ -93,30 +97,36 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
     # Do not populate CMake User Package Registry
     if cmake.satisfies("@3.15:"):
         # see https://cmake.org/cmake/help/latest/policy/CMP0090.html
-        args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
+        args.append(define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
     elif cmake.satisfies("@3.1:"):
         # see https://cmake.org/cmake/help/latest/variable/CMAKE_EXPORT_NO_PACKAGE_REGISTRY.html
-        args.append(CMakeBuilder.define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))
+        args.append(define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))

     # Do not use CMake User/System Package Registry
     # https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#disabling-the-package-registry
     if cmake.satisfies("@3.16:"):
-        args.append(CMakeBuilder.define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
+        args.append(define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
     elif cmake.satisfies("@3.1:3.15"):
-        args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
-        args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))
+        args.append(define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
+        args.append(define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))

     # Export a compilation database if supported.
     if _supports_compilation_databases(pkg):
-        args.append(CMakeBuilder.define("CMAKE_EXPORT_COMPILE_COMMANDS", True))
+        args.append(define("CMAKE_EXPORT_COMPILE_COMMANDS", True))

     # Enable MACOSX_RPATH by default when cmake_minimum_required < 3
     # https://cmake.org/cmake/help/latest/policy/CMP0042.html
     if pkg.spec.satisfies("platform=darwin") and cmake.satisfies("@3:"):
-        args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))
+        args.append(define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))
+
+    # Disable find package's config mode for versions of Boost that
+    # didn't provide it. See https://github.com/spack/spack/issues/20169
+    # and https://cmake.org/cmake/help/latest/module/FindBoost.html
+    if pkg.spec.satisfies("^boost@:1.69.0"):
+        args.append(define("Boost_NO_BOOST_CMAKE", True))


-def generator(*names: str, default: Optional[str] = None):
+def generator(*names: str, default: Optional[str] = None) -> None:
     """The build system generator to use.

     See ``cmake --help`` for a list of valid generators.
@@ -157,15 +167,18 @@ def _values(x):
 def get_cmake_prefix_path(pkg: spack.package_base.PackageBase) -> List[str]:
     """Obtain the CMAKE_PREFIX_PATH entries for a package, based on the cmake_prefix_path package
     attribute of direct build/test and transitive link dependencies."""
-    # Add direct build/test deps
-    selected: Set[str] = {s.dag_hash() for s in pkg.spec.dependencies(deptype=dt.BUILD | dt.TEST)}
-    # Add transitive link deps
-    selected.update(s.dag_hash() for s in pkg.spec.traverse(root=False, deptype=dt.LINK))
-    # Separate out externals so they do not shadow Spack prefixes
-    externals, spack_built = stable_partition(
-        (s for s in pkg.spec.traverse(root=False, order="topo") if s.dag_hash() in selected),
-        lambda x: x.external,
+    edges = traverse.traverse_topo_edges_generator(
+        traverse.with_artificial_edges([pkg.spec]),
+        visitor=traverse.MixedDepthVisitor(
+            direct=dt.BUILD | dt.TEST, transitive=dt.LINK | dt.RUN, key=traverse.by_dag_hash
+        ),
+        key=traverse.by_dag_hash,
+        root=False,
+        all_edges=False,  # cover all nodes, not all edges
     )
+    ordered_specs = [edge.spec for edge in edges]
+    # Separate out externals so they do not shadow Spack prefixes
+    externals, spack_built = stable_partition((s for s in ordered_specs), lambda x: x.external)

     return filter_system_paths(
         path for spec in chain(spack_built, externals) for path in spec.package.cmake_prefix_paths
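The rewrite replaces two separate walks with a single mixed-depth traversal: build/test dependencies count only when they are direct, while link/run dependencies count transitively. A conceptual sketch of the selection rule, built from the spec methods in the removed code (not Spack's actual implementation):

```python
def selected_hashes(root_spec) -> set:
    # direct build/test dependencies of the root only
    direct = {d.dag_hash() for d in root_spec.dependencies(deptype=("build", "test"))}
    # link/run dependencies, followed transitively
    transitive = {
        s.dag_hash() for s in root_spec.traverse(root=False, deptype=("link", "run"))
    }
    return direct | transitive
```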
@@ -263,15 +276,15 @@ def flags_to_build_system_args(self, flags):

     # Legacy methods (used by too many packages to change them,
     # need to forward to the builder)
-    def define(self, *args, **kwargs):
-        return self.builder.define(*args, **kwargs)
+    def define(self, cmake_var: str, value: Any) -> str:
+        return define(cmake_var, value)

-    def define_from_variant(self, *args, **kwargs):
-        return self.builder.define_from_variant(*args, **kwargs)
+    def define_from_variant(self, cmake_var: str, variant: Optional[str] = None) -> str:
+        return define_from_variant(self, cmake_var, variant)


 @spack.builder.builder("cmake")
-class CMakeBuilder(BaseBuilder):
+class CMakeBuilder(BuilderWithDefaults):
     """The cmake builder encodes the default way of building software with CMake. It
     has three phases that can be overridden:
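Nothing changes at the call site for package authors; the legacy methods now simply forward to the module-level helpers defined later in this diff. A hedged usage sketch (the package is hypothetical):

```python
class Example(CMakePackage):  # hypothetical package
    def cmake_args(self):
        # both calls are forwarded to module-level define()/define_from_variant()
        return [
            self.define("BUILD_SHARED_LIBS", True),
            self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"),
        ]
```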
@@ -321,15 +334,15 @@ class CMakeBuilder(BaseBuilder):
     build_time_test_callbacks = ["check"]

     @property
-    def archive_files(self):
+    def archive_files(self) -> List[str]:
         """Files to archive for packages based on CMake"""
         files = [os.path.join(self.build_directory, "CMakeCache.txt")]
-        if _supports_compilation_databases(self):
+        if _supports_compilation_databases(self.pkg):
             files.append(os.path.join(self.build_directory, "compile_commands.json"))
         return files

     @property
-    def root_cmakelists_dir(self):
+    def root_cmakelists_dir(self) -> str:
         """The relative path to the directory containing CMakeLists.txt

         This path is relative to the root of the extracted tarball,
@@ -338,16 +351,17 @@ def root_cmakelists_dir(self):
         return self.pkg.stage.source_path

     @property
-    def generator(self):
+    def generator(self) -> str:
         if self.spec.satisfies("generator=make"):
             return "Unix Makefiles"
         if self.spec.satisfies("generator=ninja"):
             return "Ninja"
-        msg = f'{self.spec.format()} has an unsupported value for the "generator" variant'
-        raise ValueError(msg)
+        raise ValueError(
+            f'{self.spec.format()} has an unsupported value for the "generator" variant'
+        )

     @property
-    def std_cmake_args(self):
+    def std_cmake_args(self) -> List[str]:
         """Standard cmake arguments provided as a property for
         convenience of package writers
         """
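Since the ``generator`` variant only accepts ``make`` and ``ninja`` here, a package opts in through the ``generator(...)`` directive defined earlier in this file; a hedged usage sketch:

```python
class MyCMakePackage(CMakePackage):  # hypothetical package
    # allow both generators, configure with Ninja by default
    generator("ninja", "make", default="ninja")
```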
@@ -356,7 +370,9 @@ def std_cmake_args(self):
         return args

     @staticmethod
-    def std_args(pkg, generator=None):
+    def std_args(
+        pkg: spack.package_base.PackageBase, generator: Optional[str] = None
+    ) -> List[str]:
         """Computes the standard cmake arguments for a generic package"""
         default_generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
         generator = generator or default_generator
@@ -373,7 +389,6 @@ def std_args(pkg, generator=None):
         except KeyError:
             build_type = "RelWithDebInfo"

-        define = CMakeBuilder.define
         args = [
             "-G",
             generator,
@@ -405,152 +420,31 @@ def std_args(pkg, generator=None):
         return args

     @staticmethod
-    def define_cuda_architectures(pkg):
-        """Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.
-
-        ``cuda_arch`` is a variant composed of a list of target CUDA architectures and
-        it is declared in the cuda package.
-
-        This method is a no-op for cmake<3.18 and when the ``cuda_arch`` variant is not set.
-
-        """
-        cmake_flag = str()
-        if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
-            cmake_flag = CMakeBuilder.define(
-                "CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value
-            )
-
-        return cmake_flag
+    def define_cuda_architectures(pkg: spack.package_base.PackageBase) -> str:
+        return define_cuda_architectures(pkg)

     @staticmethod
-    def define_hip_architectures(pkg):
-        """Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.
-
-        ``amdgpu_target`` is a variant composed of a list of the target HIP
-        architectures and it is declared in the rocm package.
-
-        This method is a no-op for cmake<3.18 and when the ``amdgpu_target`` variant is
-        not set.
-
-        """
-        cmake_flag = str()
-        if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
-            cmake_flag = CMakeBuilder.define(
-                "CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value
-            )
-
-        return cmake_flag
+    def define_hip_architectures(pkg: spack.package_base.PackageBase) -> str:
+        return define_hip_architectures(pkg)

     @staticmethod
-    def define(cmake_var, value):
-        """Return a CMake command line argument that defines a variable.
-
-        The resulting argument will convert boolean values to OFF/ON
-        and lists/tuples to CMake semicolon-separated string lists. All other
-        values will be interpreted as strings.
-
-        Examples:
-
-            .. code-block:: python
-
-                [define('BUILD_SHARED_LIBS', True),
-                 define('CMAKE_CXX_STANDARD', 14),
-                 define('swr', ['avx', 'avx2'])]
-
-            will generate the following configuration options:
-
-            .. code-block:: console
-
-                ["-DBUILD_SHARED_LIBS:BOOL=ON",
-                 "-DCMAKE_CXX_STANDARD:STRING=14",
-                 "-DSWR:STRING=avx;avx2"]
-
-        """
-        # Create a list of pairs. Each pair includes a configuration
-        # option and whether or not that option is activated
-        if isinstance(value, bool):
-            kind = "BOOL"
-            value = "ON" if value else "OFF"
-        else:
-            kind = "STRING"
-            if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
-                value = ";".join(str(v) for v in value)
-            else:
-                value = str(value)
-
-        return "".join(["-D", cmake_var, ":", kind, "=", value])
+    def define(cmake_var: str, value: Any) -> str:
+        return define(cmake_var, value)

-    def define_from_variant(self, cmake_var, variant=None):
-        """Return a CMake command line argument from the given variant's value.
-
-        The optional ``variant`` argument defaults to the lower-case transform
-        of ``cmake_var``.
-
-        This utility function is similar to
-        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`.
-
-        Examples:
-
-            Given a package with:
-
-            .. code-block:: python
-
-                variant('cxxstd', default='11', values=('11', '14'),
-                        multi=False, description='')
-                variant('shared', default=True, description='')
-                variant('swr', values=any_combination_of('avx', 'avx2'),
-                        description='')
-
-            calling this function like:
-
-            .. code-block:: python
-
-                [self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
-                 self.define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
-                 self.define_from_variant('SWR')]
-
-            will generate the following configuration options:
-
-            .. code-block:: console
-
-                ["-DBUILD_SHARED_LIBS:BOOL=ON",
-                 "-DCMAKE_CXX_STANDARD:STRING=14",
-                 "-DSWR:STRING=avx;avx2"]
-
-            for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``
-
-        Note: if the provided variant is conditional, and the condition is not met,
-        this function returns an empty string. CMake discards empty strings
-        provided on the command line.
-        """
-
-        if variant is None:
-            variant = cmake_var.lower()
-
-        if not self.pkg.has_variant(variant):
-            raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))
-
-        if variant not in self.pkg.spec.variants:
-            return ""
-
-        value = self.pkg.spec.variants[variant].value
-        if isinstance(value, (tuple, list)):
-            # Sort multi-valued variants for reproducibility
-            value = sorted(value)
-
-        return self.define(cmake_var, value)
+    def define_from_variant(self, cmake_var: str, variant: Optional[str] = None) -> str:
+        return define_from_variant(self.pkg, cmake_var, variant)

     @property
-    def build_dirname(self):
+    def build_dirname(self) -> str:
         """Directory name to use when building the package."""
-        return "spack-build-%s" % self.pkg.spec.dag_hash(7)
+        return f"spack-build-{self.pkg.spec.dag_hash(7)}"

     @property
-    def build_directory(self):
+    def build_directory(self) -> str:
         """Full-path to the directory to use when building the package."""
         return os.path.join(self.pkg.stage.path, self.build_dirname)

-    def cmake_args(self):
+    def cmake_args(self) -> List[str]:
         """List of all the arguments that must be passed to cmake, except:

         * CMAKE_INSTALL_PREFIX
@@ -560,7 +454,12 @@ def cmake_args(self):
         """
         return []

-    def cmake(self, pkg, spec, prefix):
+    def cmake(
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
+    ) -> None:
         """Runs ``cmake`` in the build directory"""

         # skip cmake phase if it is an incremental develop build
@@ -575,7 +474,12 @@ def cmake(self, pkg, spec, prefix):
         with fs.working_dir(self.build_directory, create=True):
             pkg.module.cmake(*options)

-    def build(self, pkg, spec, prefix):
+    def build(
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
+    ) -> None:
         """Make the build targets"""
         with fs.working_dir(self.build_directory):
             if self.generator == "Unix Makefiles":
@@ -584,7 +488,12 @@ def build(self, pkg, spec, prefix):
                 self.build_targets.append("-v")
             pkg.module.ninja(*self.build_targets)

-    def install(self, pkg, spec, prefix):
+    def install(
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
+    ) -> None:
         """Make the install targets"""
         with fs.working_dir(self.build_directory):
             if self.generator == "Unix Makefiles":
@@ -592,9 +501,9 @@ def install(self, pkg, spec, prefix):
         elif self.generator == "Ninja":
             pkg.module.ninja(*self.install_targets)

-    spack.builder.run_after("build")(execute_build_time_tests)
+    spack.phase_callbacks.run_after("build")(execute_build_time_tests)

-    def check(self):
+    def check(self) -> None:
         """Searches the CMake-generated files for the targets ``test`` and ``check``,
         and runs them if found.
         """
@@ -605,3 +514,133 @@ def check(self):
         elif self.generator == "Ninja":
             self.pkg._if_ninja_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL")
             self.pkg._if_ninja_target_execute("check")
+
+
+def define(cmake_var: str, value: Any) -> str:
+    """Return a CMake command line argument that defines a variable.
+
+    The resulting argument will convert boolean values to OFF/ON and lists/tuples to CMake
+    semicolon-separated string lists. All other values will be interpreted as strings.
+
+    Examples:
+
+        .. code-block:: python
+
+            [define("BUILD_SHARED_LIBS", True),
+             define("CMAKE_CXX_STANDARD", 14),
+             define("swr", ["avx", "avx2"])]
+
+        will generate the following configuration options:
+
+        .. code-block:: console
+
+            ["-DBUILD_SHARED_LIBS:BOOL=ON",
+             "-DCMAKE_CXX_STANDARD:STRING=14",
+             "-DSWR:STRING=avx;avx2"]
+
+    """
+    # Create a list of pairs. Each pair includes a configuration
+    # option and whether or not that option is activated
+    if isinstance(value, bool):
+        kind = "BOOL"
+        value = "ON" if value else "OFF"
+    else:
+        kind = "STRING"
+        if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
+            value = ";".join(str(v) for v in value)
+        else:
+            value = str(value)
+
+    return "".join(["-D", cmake_var, ":", kind, "=", value])
+
+
+def define_from_variant(
+    pkg: spack.package_base.PackageBase, cmake_var: str, variant: Optional[str] = None
+) -> str:
+    """Return a CMake command line argument from the given variant's value.
+
+    The optional ``variant`` argument defaults to the lower-case transform
+    of ``cmake_var``.
+
+    Examples:
+
+        Given a package with:
+
+        .. code-block:: python
+
+            variant("cxxstd", default="11", values=("11", "14"),
+                    multi=False, description="")
+            variant("shared", default=True, description="")
+            variant("swr", values=any_combination_of("avx", "avx2"),
+                    description="")
+
+        calling this function like:
+
+        .. code-block:: python
+
+            [
+                self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
+                self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"),
+                self.define_from_variant("SWR"),
+            ]
+
+        will generate the following configuration options:
+
+        .. code-block:: console
+
+            [
+                "-DBUILD_SHARED_LIBS:BOOL=ON",
+                "-DCMAKE_CXX_STANDARD:STRING=14",
+                "-DSWR:STRING=avx;avx2",
+            ]
+
+        for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``
+
+    Note: if the provided variant is conditional, and the condition is not met, this function
+    returns an empty string. CMake discards empty strings provided on the command line.
+    """
+    if variant is None:
+        variant = cmake_var.lower()
+
+    if not pkg.has_variant(variant):
+        raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, pkg.name))
+
+    if variant not in pkg.spec.variants:
+        return ""
+
+    value = pkg.spec.variants[variant].value
+    if isinstance(value, (tuple, list)):
+        # Sort multi-valued variants for reproducibility
+        value = sorted(value)
+
+    return define(cmake_var, value)
+
+
+def define_hip_architectures(pkg: spack.package_base.PackageBase) -> str:
+    """Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.
+
+    ``amdgpu_target`` is a variant composed of a list of the target HIP
+    architectures and it is declared in the rocm package.
+
+    This function is a no-op for cmake<3.21 and when the ``amdgpu_target`` variant is
+    not set.
+
+    """
+    if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
+        return define("CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value)
+
+    return ""
+
+
+def define_cuda_architectures(pkg: spack.package_base.PackageBase) -> str:
+    """Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.
+
+    ``cuda_arch`` is a variant composed of a list of target CUDA architectures and
+    it is declared in the cuda package.
+
+    This function is a no-op for cmake<3.18 and when the ``cuda_arch`` variant is not set.
+
+    """
+    if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
+        return define("CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value)
+    return ""
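A quick check of ``define()``'s conversion rules, consistent with the implementation above (the expected strings follow directly from the BOOL/STRING branches):

```python
assert define("BUILD_SHARED_LIBS", True) == "-DBUILD_SHARED_LIBS:BOOL=ON"
assert define("BUILD_TESTING", False) == "-DBUILD_TESTING:BOOL=OFF"
assert define("CMAKE_CXX_STANDARD", 14) == "-DCMAKE_CXX_STANDARD:STRING=14"
assert define("SWR", ["avx", "avx2"]) == "-DSWR:STRING=avx;avx2"
```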
@@ -180,13 +180,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     conflicts("%gcc@7:", when="+cuda ^cuda@:9.1 target=x86_64:")
     conflicts("%gcc@8:", when="+cuda ^cuda@:10.0.130 target=x86_64:")
     conflicts("%gcc@9:", when="+cuda ^cuda@:10.2.89 target=x86_64:")
-    conflicts("%pgi@:14.8", when="+cuda ^cuda@:7.0.27 target=x86_64:")
-    conflicts("%pgi@:15.3,15.5:", when="+cuda ^cuda@7.5 target=x86_64:")
-    conflicts("%pgi@:16.2,16.0:16.3", when="+cuda ^cuda@8 target=x86_64:")
-    conflicts("%pgi@:15,18:", when="+cuda ^cuda@9.0:9.1 target=x86_64:")
-    conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10.0 target=x86_64:")
-    conflicts("%pgi@:17,20:", when="+cuda ^cuda@10.1.105:10.2.89 target=x86_64:")
-    conflicts("%pgi@:17,21:", when="+cuda ^cuda@11.0.2:11.1.0 target=x86_64:")
     conflicts("%clang@:3.4", when="+cuda ^cuda@:7.5 target=x86_64:")
     conflicts("%clang@:3.7,4:", when="+cuda ^cuda@8.0:9.0 target=x86_64:")
     conflicts("%clang@:3.7,4.1:", when="+cuda ^cuda@9.1 target=x86_64:")
@@ -212,9 +205,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     conflicts("%gcc@8:", when="+cuda ^cuda@:10.0.130 target=ppc64le:")
     conflicts("%gcc@9:", when="+cuda ^cuda@:10.1.243 target=ppc64le:")
     # officially, CUDA 11.0.2 only supports the system GCC 8.3 on ppc64le
-    conflicts("%pgi", when="+cuda ^cuda@:8 target=ppc64le:")
-    conflicts("%pgi@:16", when="+cuda ^cuda@:9.1.185 target=ppc64le:")
-    conflicts("%pgi@:17", when="+cuda ^cuda@:10 target=ppc64le:")
     conflicts("%clang@4:", when="+cuda ^cuda@:9.0.176 target=ppc64le:")
     conflicts("%clang@5:", when="+cuda ^cuda@:9.1 target=ppc64le:")
     conflicts("%clang@6:", when="+cuda ^cuda@:9.2 target=ppc64le:")
@@ -7,8 +7,9 @@
 import spack.builder
 import spack.directives
 import spack.package_base
+import spack.phase_callbacks

-from ._checks import BaseBuilder, apply_macos_rpath_fixups, execute_install_time_tests
+from ._checks import BuilderWithDefaults, apply_macos_rpath_fixups, execute_install_time_tests


 class Package(spack.package_base.PackageBase):
@@ -26,7 +27,7 @@ class Package(spack.package_base.PackageBase):


 @spack.builder.builder("generic")
-class GenericBuilder(BaseBuilder):
+class GenericBuilder(BuilderWithDefaults):
     """A builder for a generic build system, that requires packagers
     to implement an "install" phase.
     """
@@ -44,7 +45,7 @@ class GenericBuilder(BaseBuilder):
     install_time_test_callbacks = []

     # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
-    spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
+    spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)

     # unconditionally perform any post-install phase tests
-    spack.builder.run_after("install")(execute_install_time_tests)
+    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
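Since ``GenericBuilder`` defines no phases beyond ``install``, a package only has to provide that one method. A hedged sketch (package and file names are illustrative):

```python
class MyTool(Package):  # hypothetical package using the generic build system
    def install(self, spec, prefix):
        # no configure/build step: copy the pre-built artifact into place
        mkdirp(prefix.bin)
        install("mytool", prefix.bin)
```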
@@ -7,10 +7,11 @@

 import spack.builder
 import spack.package_base
+import spack.phase_callbacks
 from spack.directives import build_system, extends
 from spack.multimethod import when

-from ._checks import BaseBuilder, execute_install_time_tests
+from ._checks import BuilderWithDefaults, execute_install_time_tests


 class GoPackage(spack.package_base.PackageBase):
@@ -32,7 +33,7 @@ class GoPackage(spack.package_base.PackageBase):


 @spack.builder.builder("go")
-class GoBuilder(BaseBuilder):
+class GoBuilder(BuilderWithDefaults):
     """The Go builder encodes the most common way of building software with
     a golang go.mod file. It has two phases that can be overridden, if need be:

@@ -99,7 +100,7 @@ def install(self, pkg, spec, prefix):
         fs.mkdirp(prefix.bin)
         fs.install(pkg.name, prefix.bin)

-    spack.builder.run_after("install")(execute_install_time_tests)
+    spack.phase_callbacks.run_after("install")(execute_install_time_tests)

     def check(self):
         """Run ``go test .`` in the source directory"""
@@ -22,8 +22,8 @@
     install,
 )

-import spack.builder
 import spack.error
+import spack.phase_callbacks
 from spack.build_environment import dso_suffix
 from spack.error import InstallError
 from spack.util.environment import EnvironmentModifications
@@ -1163,7 +1163,7 @@ def _determine_license_type(self):
         debug_print(license_type)
         return license_type

-    @spack.builder.run_before("install")
+    @spack.phase_callbacks.run_before("install")
     def configure(self):
         """Generates the silent.cfg file to pass to installer.sh.

@@ -1250,7 +1250,7 @@ def install(self, spec, prefix):
         for f in glob.glob("%s/intel*log" % tmpdir):
             install(f, dst)

-    @spack.builder.run_after("install")
+    @spack.phase_callbacks.run_after("install")
     def validate_install(self):
         # Sometimes the installer exits with an error but doesn't pass a
         # non-zero exit code to spack. Check for the existence of a 'bin'
@@ -1258,7 +1258,7 @@ def validate_install(self):
         if not os.path.exists(self.prefix.bin):
             raise InstallError("The installer has failed to install anything.")

-    @spack.builder.run_after("install")
+    @spack.phase_callbacks.run_after("install")
     def configure_rpath(self):
         if "+rpath" not in self.spec:
             return
@@ -1276,7 +1276,7 @@ def configure_rpath(self):
         with open(compiler_cfg, "w") as fh:
             fh.write("-Xlinker -rpath={0}\n".format(compilers_lib_dir))

-    @spack.builder.run_after("install")
+    @spack.phase_callbacks.run_after("install")
     def configure_auto_dispatch(self):
         if self._has_compilers:
             if "auto_dispatch=none" in self.spec:
@@ -1300,7 +1300,7 @@ def configure_auto_dispatch(self):
         with open(compiler_cfg, "a") as fh:
             fh.write("-ax{0}\n".format(",".join(ad)))

-    @spack.builder.run_after("install")
+    @spack.phase_callbacks.run_after("install")
     def filter_compiler_wrappers(self):
         if ("+mpi" in self.spec or self.provides("mpi")) and "~newdtags" in self.spec:
             bin_dir = self.component_bin_dir("mpi")
@@ -1308,7 +1308,7 @@ def filter_compiler_wrappers(self):
             f = os.path.join(bin_dir, f)
             filter_file("-Xlinker --enable-new-dtags", " ", f, string=True)

-    @spack.builder.run_after("install")
+    @spack.phase_callbacks.run_after("install")
     def uninstall_ism(self):
         # The "Intel(R) Software Improvement Program" [ahem] gets installed,
         # apparently regardless of PHONEHOME_SEND_USAGE_DATA.
@@ -1340,7 +1340,7 @@ def base_lib_dir(self):
         debug_print(d)
         return d

-    @spack.builder.run_after("install")
+    @spack.phase_callbacks.run_after("install")
     def modify_LLVMgold_rpath(self):
         """Add libimf.so and other required libraries to the RUNPATH of LLVMgold.so.
@@ -8,11 +8,14 @@

 import spack.builder
 import spack.package_base
+import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, conflicts, depends_on
 from spack.multimethod import when

 from ._checks import (
-    BaseBuilder,
+    BuilderWithDefaults,
     apply_macos_rpath_fixups,
     execute_build_time_tests,
     execute_install_time_tests,
@@ -36,7 +39,7 @@ class MakefilePackage(spack.package_base.PackageBase):


 @spack.builder.builder("makefile")
-class MakefileBuilder(BaseBuilder):
+class MakefileBuilder(BuilderWithDefaults):
     """The Makefile builder encodes the most common way of building software with
     Makefiles. It has three phases that can be overridden, if need be:

@@ -91,35 +94,50 @@ class MakefileBuilder(BaseBuilder):
     install_time_test_callbacks = ["installcheck"]

     @property
-    def build_directory(self):
+    def build_directory(self) -> str:
         """Return the directory containing the main Makefile."""
         return self.pkg.stage.source_path

-    def edit(self, pkg, spec, prefix):
+    def edit(
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
+    ) -> None:
         """Edit the Makefile before calling make. The default is a no-op."""
         pass

-    def build(self, pkg, spec, prefix):
+    def build(
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
+    ) -> None:
         """Run "make" on the build targets specified by the builder."""
         with fs.working_dir(self.build_directory):
             pkg.module.make(*self.build_targets)

-    def install(self, pkg, spec, prefix):
+    def install(
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
+    ) -> None:
         """Run "make" on the install targets specified by the builder."""
         with fs.working_dir(self.build_directory):
             pkg.module.make(*self.install_targets)

-    spack.builder.run_after("build")(execute_build_time_tests)
+    spack.phase_callbacks.run_after("build")(execute_build_time_tests)

-    def check(self):
+    def check(self) -> None:
         """Run "make" on the ``test`` and ``check`` targets, if found."""
         with fs.working_dir(self.build_directory):
             self.pkg._if_make_target_execute("test")
             self.pkg._if_make_target_execute("check")

-    spack.builder.run_after("install")(execute_install_time_tests)
+    spack.phase_callbacks.run_after("install")(execute_install_time_tests)

-    def installcheck(self):
+    def installcheck(self) -> None:
         """Searches the Makefile for an ``installcheck`` target
         and runs it if found.
         """
@@ -127,4 +145,4 @@ def installcheck(self):
         self.pkg._if_make_target_execute("installcheck")

     # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
-    spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
+    spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
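The usual customization point for this builder is the ``edit`` phase, which by default does nothing. A hedged sketch of a package that points a stock Makefile at the Spack prefix before ``make`` runs (package name and Makefile variable are illustrative):

```python
class MyMakefileApp(MakefilePackage):  # hypothetical package
    def edit(self, spec, prefix):
        # rewrite PREFIX in the shipped Makefile before the build phase
        makefile = FileFilter("Makefile")
        makefile.filter(r"^PREFIX\s*=.*", f"PREFIX = {prefix}")
```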
@@ -10,7 +10,7 @@
 from spack.multimethod import when
 from spack.util.executable import which

-from ._checks import BaseBuilder
+from ._checks import BuilderWithDefaults


 class MavenPackage(spack.package_base.PackageBase):
@@ -34,7 +34,7 @@ class MavenPackage(spack.package_base.PackageBase):


 @spack.builder.builder("maven")
-class MavenBuilder(BaseBuilder):
+class MavenBuilder(BuilderWithDefaults):
     """The Maven builder encodes the default way to build software with Maven.
     It has two phases that can be overridden, if need be:
@@ -9,10 +9,13 @@

 import spack.builder
 import spack.package_base
+import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, conflicts, depends_on, variant
 from spack.multimethod import when

-from ._checks import BaseBuilder, execute_build_time_tests
+from ._checks import BuilderWithDefaults, execute_build_time_tests


 class MesonPackage(spack.package_base.PackageBase):
@@ -62,7 +65,7 @@ def flags_to_build_system_args(self, flags):


 @spack.builder.builder("meson")
-class MesonBuilder(BaseBuilder):
+class MesonBuilder(BuilderWithDefaults):
     """The Meson builder encodes the default way to build software with Meson.
     The builder has three phases that can be overridden, if need be:

@@ -112,7 +115,7 @@ def archive_files(self):
         return [os.path.join(self.build_directory, "meson-logs", "meson-log.txt")]

     @property
-    def root_mesonlists_dir(self):
+    def root_mesonlists_dir(self) -> str:
         """Relative path to the directory containing meson.build

         This path is relative to the root of the extracted tarball,
@@ -121,7 +124,7 @@ def root_mesonlists_dir(self):
         return self.pkg.stage.source_path

     @property
-    def std_meson_args(self):
+    def std_meson_args(self) -> List[str]:
         """Standard meson arguments provided as a property for convenience
         of package writers.
         """
@@ -132,7 +135,7 @@ def std_meson_args(self):
         return std_meson_args

     @staticmethod
-    def std_args(pkg):
+    def std_args(pkg) -> List[str]:
         """Standard meson arguments for a generic package."""
         try:
             build_type = pkg.spec.variants["buildtype"].value
@@ -172,7 +175,7 @@ def build_directory(self):
         """Directory to use when building the package."""
         return os.path.join(self.pkg.stage.path, self.build_dirname)

-    def meson_args(self):
+    def meson_args(self) -> List[str]:
         """List of arguments that must be passed to meson, except:

         * ``--prefix``
@@ -185,7 +188,12 @@ def meson_args(self):
         """
         return []

-    def meson(self, pkg, spec, prefix):
+    def meson(
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
+    ) -> None:
         """Run ``meson`` in the build directory"""
         options = []
         if self.spec["meson"].satisfies("@0.64:"):
@@ -196,21 +204,31 @@ def meson(self, pkg, spec, prefix):
         with fs.working_dir(self.build_directory, create=True):
             pkg.module.meson(*options)

-    def build(self, pkg, spec, prefix):
+    def build(
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
+    ) -> None:
         """Make the build targets"""
         options = ["-v"]
         options += self.build_targets
         with fs.working_dir(self.build_directory):
             pkg.module.ninja(*options)

-    def install(self, pkg, spec, prefix):
+    def install(
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
+    ) -> None:
         """Make the install targets"""
         with fs.working_dir(self.build_directory):
             pkg.module.ninja(*self.install_targets)

-    spack.builder.run_after("build")(execute_build_time_tests)
+    spack.phase_callbacks.run_after("build")(execute_build_time_tests)

-    def check(self):
+    def check(self) -> None:
         """Search Meson-generated files for the target ``test`` and run it if found."""
         with fs.working_dir(self.build_directory):
             self.pkg._if_ninja_target_execute("test")
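Packagers interact with this builder mainly through ``meson_args``, which supplies everything except ``--prefix`` and the standard arguments computed above. A hedged override sketch (package and option names are illustrative):

```python
class MyMesonPackage(MesonPackage):  # hypothetical package
    def meson_args(self):
        # extra project options, appended after the standard meson arguments
        return ["-Dwith-docs=false"]
```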
@@ -10,7 +10,7 @@
 import spack.package_base
 from spack.directives import build_system, conflicts

-from ._checks import BaseBuilder
+from ._checks import BuilderWithDefaults


 class MSBuildPackage(spack.package_base.PackageBase):
@@ -26,7 +26,7 @@ class MSBuildPackage(spack.package_base.PackageBase):


 @spack.builder.builder("msbuild")
-class MSBuildBuilder(BaseBuilder):
+class MSBuildBuilder(BuilderWithDefaults):
     """The MSBuild builder encodes the most common way of building software with
     Microsoft's MSBuild tool. It has two phases that can be overridden, if need be:
@@ -10,7 +10,7 @@
 import spack.package_base
 from spack.directives import build_system, conflicts

-from ._checks import BaseBuilder
+from ._checks import BuilderWithDefaults


 class NMakePackage(spack.package_base.PackageBase):
@@ -26,7 +26,7 @@ class NMakePackage(spack.package_base.PackageBase):


 @spack.builder.builder("nmake")
-class NMakeBuilder(BaseBuilder):
+class NMakeBuilder(BuilderWithDefaults):
     """The NMake builder encodes the most common way of building software with
     Microsoft's NMake tool. It has two phases that can be overridden, if need be:
@@ -7,7 +7,7 @@
 from spack.directives import build_system, extends
 from spack.multimethod import when

-from ._checks import BaseBuilder
+from ._checks import BuilderWithDefaults


 class OctavePackage(spack.package_base.PackageBase):
@@ -29,7 +29,7 @@ class OctavePackage(spack.package_base.PackageBase):


 @spack.builder.builder("octave")
-class OctaveBuilder(BaseBuilder):
+class OctaveBuilder(BuilderWithDefaults):
     """The octave builder provides the following phases that can be overridden:

     1. :py:meth:`~.OctaveBuilder.install`
@@ -255,7 +255,7 @@ def libs(self):
         return find_libraries("*", root=self.component_prefix.lib, recursive=not self.v2_layout)


-class IntelOneApiLibraryPackageWithSdk(IntelOneApiPackage):
+class IntelOneApiLibraryPackageWithSdk(IntelOneApiLibraryPackage):
     """Base class for Intel oneAPI library packages with SDK components.

     Contains some convenient default implementations for libraries
@@ -10,11 +10,12 @@

 import spack.builder
 import spack.package_base
+import spack.phase_callbacks
 from spack.directives import build_system, extends
 from spack.install_test import SkipTest, test_part
 from spack.util.executable import Executable

-from ._checks import BaseBuilder, execute_build_time_tests
+from ._checks import BuilderWithDefaults, execute_build_time_tests


 class PerlPackage(spack.package_base.PackageBase):
@@ -84,7 +85,7 @@ def test_use(self):


 @spack.builder.builder("perl")
-class PerlBuilder(BaseBuilder):
+class PerlBuilder(BuilderWithDefaults):
     """The perl builder provides four phases that can be overridden, if required:

     1. :py:meth:`~.PerlBuilder.configure`
@@ -163,7 +164,7 @@ def configure(self, pkg, spec, prefix):
     # Build.PL may be too long causing the build to fail. Patching the shebang
     # does not happen until after install so set '/usr/bin/env perl' here in
     # the Build script.
-    @spack.builder.run_after("configure")
+    @spack.phase_callbacks.run_after("configure")
     def fix_shebang(self):
         if self.build_method == "Build.PL":
             pattern = "#!{0}".format(self.spec["perl"].command.path)
@@ -175,7 +176,7 @@ def build(self, pkg, spec, prefix):
         self.build_executable()

     # Ensure that tests run after build (if requested):
-    spack.builder.run_after("build")(execute_build_time_tests)
+    spack.phase_callbacks.run_after("build")(execute_build_time_tests)

     def check(self):
         """Runs built-in tests of a Perl package."""
@@ -24,6 +24,7 @@
 import spack.detection
 import spack.multimethod
 import spack.package_base
+import spack.phase_callbacks
 import spack.platforms
 import spack.repo
 import spack.spec
@@ -34,7 +35,7 @@
 from spack.spec import Spec
 from spack.util.prefix import Prefix

-from ._checks import BaseBuilder, execute_install_time_tests
+from ._checks import BuilderWithDefaults, execute_install_time_tests


 def _flatten_dict(dictionary: Mapping[str, object]) -> Iterable[str]:
@@ -374,7 +375,7 @@ def list_url(cls) -> Optional[str]:  # type: ignore[override]
         return None

     @property
-    def python_spec(self):
+    def python_spec(self) -> Spec:
         """Get python-venv if it exists or python otherwise."""
         python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
         return python
@@ -425,7 +426,7 @@ def libs(self) -> LibraryList:


 @spack.builder.builder("python_pip")
-class PythonPipBuilder(BaseBuilder):
+class PythonPipBuilder(BuilderWithDefaults):
     phases = ("install",)

     #: Names associated with package methods in the old build-system format
@@ -543,4 +544,4 @@ def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
         with fs.working_dir(self.build_directory):
             pip(*args)

-    spack.builder.run_after("install")(execute_install_time_tests)
+    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
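The ``python_spec`` property relies on ``Spec.dependencies`` returning a list, so an empty result falls through ``or`` to the fallback query. The idiom in isolation, with plain lists standing in for the dependency queries:

```python
# "python-venv" takes precedence when present; otherwise fall back to "python"
venv_deps = []                 # stand-in for spec.dependencies("python-venv")
python_deps = ["python@3.12"]  # stand-in for spec.dependencies("python")

python, *_ = venv_deps or python_deps
assert python == "python@3.12"
```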
@@ -6,9 +6,10 @@

 import spack.builder
 import spack.package_base
+import spack.phase_callbacks
 from spack.directives import build_system, depends_on

-from ._checks import BaseBuilder, execute_build_time_tests
+from ._checks import BuilderWithDefaults, execute_build_time_tests


 class QMakePackage(spack.package_base.PackageBase):
@@ -30,7 +31,7 @@ class QMakePackage(spack.package_base.PackageBase):


 @spack.builder.builder("qmake")
-class QMakeBuilder(BaseBuilder):
+class QMakeBuilder(BuilderWithDefaults):
     """The qmake builder provides three phases that can be overridden:

     1. :py:meth:`~.QMakeBuilder.qmake`
@@ -81,4 +82,4 @@ def check(self):
         with working_dir(self.build_directory):
             self.pkg._if_make_target_execute("check")

-    spack.builder.run_after("build")(execute_build_time_tests)
+    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
@@ -8,7 +8,7 @@
 import spack.package_base
 from spack.directives import build_system, extends, maintainers

-from ._checks import BaseBuilder
+from ._checks import BuilderWithDefaults


 class RubyPackage(spack.package_base.PackageBase):
@@ -28,7 +28,7 @@ class RubyPackage(spack.package_base.PackageBase):


 @spack.builder.builder("ruby")
-class RubyBuilder(BaseBuilder):
+class RubyBuilder(BuilderWithDefaults):
     """The Ruby builder provides two phases that can be overridden if required:

     #. :py:meth:`~.RubyBuilder.build`
@@ -4,9 +4,10 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import spack.builder
 import spack.package_base
+import spack.phase_callbacks
 from spack.directives import build_system, depends_on

-from ._checks import BaseBuilder, execute_build_time_tests
+from ._checks import BuilderWithDefaults, execute_build_time_tests


 class SConsPackage(spack.package_base.PackageBase):
@@ -28,7 +29,7 @@ class SConsPackage(spack.package_base.PackageBase):


 @spack.builder.builder("scons")
-class SConsBuilder(BaseBuilder):
+class SConsBuilder(BuilderWithDefaults):
     """The SCons builder provides the following phases that can be overridden:

     1. :py:meth:`~.SConsBuilder.build`
@@ -79,4 +80,4 @@ def build_test(self):
         """
         pass

-    spack.builder.run_after("build")(execute_build_time_tests)
+    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
@@ -11,11 +11,12 @@
 import spack.builder
 import spack.install_test
 import spack.package_base
+import spack.phase_callbacks
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when
 from spack.util.executable import Executable

-from ._checks import BaseBuilder, execute_install_time_tests
+from ._checks import BuilderWithDefaults, execute_install_time_tests


 class SIPPackage(spack.package_base.PackageBase):
@@ -103,7 +104,7 @@ def test_imports(self):


 @spack.builder.builder("sip")
-class SIPBuilder(BaseBuilder):
+class SIPBuilder(BuilderWithDefaults):
     """The SIP builder provides the following phases that can be overridden:

     * configure
@@ -170,4 +171,4 @@ def install_args(self):
         """Arguments to pass to install."""
         return []

-    spack.builder.run_after("install")(execute_install_time_tests)
+    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
@@ -6,9 +6,10 @@

 import spack.builder
 import spack.package_base
+import spack.phase_callbacks
 from spack.directives import build_system, depends_on

-from ._checks import BaseBuilder, execute_build_time_tests, execute_install_time_tests
+from ._checks import BuilderWithDefaults, execute_build_time_tests, execute_install_time_tests


 class WafPackage(spack.package_base.PackageBase):
@@ -30,7 +31,7 @@ class WafPackage(spack.package_base.PackageBase):


 @spack.builder.builder("waf")
-class WafBuilder(BaseBuilder):
+class WafBuilder(BuilderWithDefaults):
     """The WAF builder provides the following phases that can be overridden:

     * configure
@@ -136,7 +137,7 @@ def build_test(self):
         """
         pass

-    spack.builder.run_after("build")(execute_build_time_tests)
+    spack.phase_callbacks.run_after("build")(execute_build_time_tests)

     def install_test(self):
         """Run unit tests after install.
@@ -146,4 +147,4 @@ def install_test(self):
         """
         pass

-    spack.builder.run_after("install")(execute_install_time_tests)
+    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
@@ -6,44 +6,30 @@
 import collections.abc
 import copy
 import functools
-from typing import List, Optional, Tuple
-
-from llnl.util import lang
+from typing import Dict, List, Optional, Tuple, Type

 import spack.error
 import spack.multimethod
 import spack.package_base
+import spack.phase_callbacks
 import spack.repo
 import spack.spec
 import spack.util.environment

 #: Builder classes, as registered by the "builder" decorator
-BUILDER_CLS = {}
-
-#: An object of this kind is a shared global state used to collect callbacks during
-#: class definition time, and is flushed when the class object is created at the end
-#: of the class definition
-#:
-#: Args:
-#:    attribute_name (str): name of the attribute that will be attached to the builder
-#:    callbacks (list): container used to temporarily aggregate the callbacks
-CallbackTemporaryStage = collections.namedtuple(
-    "CallbackTemporaryStage", ["attribute_name", "callbacks"]
-)
-
-#: Shared global state to aggregate "@run_before" callbacks
-_RUN_BEFORE = CallbackTemporaryStage(attribute_name="run_before_callbacks", callbacks=[])
-#: Shared global state to aggregate "@run_after" callbacks
-_RUN_AFTER = CallbackTemporaryStage(attribute_name="run_after_callbacks", callbacks=[])
+BUILDER_CLS: Dict[str, Type["Builder"]] = {}

 #: Map id(pkg) to a builder, to avoid creating multiple
 #: builders for the same package object.
-_BUILDERS = {}
+_BUILDERS: Dict[int, "Builder"] = {}


-def builder(build_system_name):
+def builder(build_system_name: str):
     """Class decorator used to register the default builder
     for a given build-system.

     Args:
-        build_system_name (str): name of the build-system
+        build_system_name: name of the build-system
     """

     def _decorator(cls):
@@ -54,13 +40,9 @@ def _decorator(cls):
     return _decorator
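``builder(...)`` is a plain registration decorator; everything these hunks change about it is typing. A usage sketch mirroring the registrations seen throughout this diff:

```python
@builder("cargo")
class CargoBuilder(BuilderWithDefaults):
    # after class creation, BUILDER_CLS["cargo"] is CargoBuilder, and
    # create(pkg) can resolve the right builder class from the package's
    # "build_system" variant
    phases = ("build", "install")
```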
-def create(pkg):
-    """Given a package object with an associated concrete spec,
-    return the builder object that can install it.
-
-    Args:
-        pkg (spack.package_base.PackageBase): package for which we want the builder
-    """
+def create(pkg: spack.package_base.PackageBase) -> "Builder":
+    """Given a package object with an associated concrete spec, return the builder object that can
+    install it."""
     if id(pkg) not in _BUILDERS:
         _BUILDERS[id(pkg)] = _create(pkg)
     return _BUILDERS[id(pkg)]
@@ -75,7 +57,7 @@ def __call__(self, spec, prefix):
         return self.phase_fn(self.builder.pkg, spec, prefix)


-def get_builder_class(pkg, name: str) -> Optional[type]:
+def get_builder_class(pkg, name: str) -> Optional[Type["Builder"]]:
     """Return the builder class if a package module defines it."""
     cls = getattr(pkg.module, name, None)
     if cls and cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
@@ -83,7 +65,7 @@ def get_builder_class(pkg, name: str) -> Optional[Type["Builder"]]:
     return None


-def _create(pkg):
+def _create(pkg: spack.package_base.PackageBase) -> "Builder":
     """Return a new builder object for the package object being passed as argument.

     The function inspects the build-system used by the package object and tries to:
@@ -103,7 +85,7 @@ class hierarchy (look at AspellDictPackage for an example of that)
     to look for build-related methods in the ``*Package``.

     Args:
-        pkg (spack.package_base.PackageBase): package object for which we need a builder
+        pkg: package object for which we need a builder
     """
     package_buildsystem = buildsystem_name(pkg)
     default_builder_cls = BUILDER_CLS[package_buildsystem]
@@ -168,8 +150,8 @@ def __forward(self, *args, **kwargs):
     # with the same name is defined in the Package, it will override this definition
     # (when _ForwardToBaseBuilder is initialized)
     for method_name in (
-        base_cls.phases
-        + base_cls.legacy_methods
+        base_cls.phases  # type: ignore
+        + base_cls.legacy_methods  # type: ignore
         + getattr(base_cls, "legacy_long_methods", tuple())
         + ("setup_build_environment", "setup_dependent_build_environment")
     ):
@@ -181,14 +163,14 @@ def __forward(self):

         return __forward

-    for attribute_name in base_cls.legacy_attributes:
+    for attribute_name in base_cls.legacy_attributes:  # type: ignore
         setattr(
             _ForwardToBaseBuilder,
             attribute_name,
             property(forward_property_to_getattr(attribute_name)),
         )

-    class Adapter(base_cls, metaclass=_PackageAdapterMeta):
+    class Adapter(base_cls, metaclass=_PackageAdapterMeta):  # type: ignore
         def __init__(self, pkg):
             # Deal with custom phases in packages here
             if hasattr(pkg, "phases"):
@@ -213,99 +195,18 @@ def setup_dependent_build_environment(self, env, dependent_spec):
     return Adapter(pkg)
def buildsystem_name(pkg):
|
||||
def buildsystem_name(pkg: spack.package_base.PackageBase) -> str:
|
||||
"""Given a package object with an associated concrete spec,
|
||||
return the name of its build system.
|
||||
|
||||
Args:
|
||||
pkg (spack.package_base.PackageBase): package for which we want
|
||||
the build system name
|
||||
"""
|
||||
return the name of its build system."""
|
||||
try:
|
||||
return pkg.spec.variants["build_system"].value
|
||||
except KeyError:
|
||||
# We are reading an old spec without the build_system variant
|
||||
return pkg.legacy_buildsystem
|
||||
|
||||
|
||||
class PhaseCallbacksMeta(type):
    """Permits registering arbitrary functions during class definition and running them
    later, before or after a given install phase.

    Each method decorated with ``run_before`` or ``run_after`` gets temporarily
    stored in a global shared state when a class being defined is parsed by the Python
    interpreter. At class definition time that temporary storage gets flushed and a list
    of callbacks is attached to the class being defined.
    """

    def __new__(mcs, name, bases, attr_dict):
        for temporary_stage in (_RUN_BEFORE, _RUN_AFTER):
            staged_callbacks = temporary_stage.callbacks

            # Here we have an adapter from an old-style package. This means there is no
            # hierarchy of builders, and every callback that had to be combined between
            # *Package and *Builder has been combined already by _PackageAdapterMeta
            if name == "Adapter":
                continue

            # If we are here we have callbacks. To get a complete list, we accumulate all the
            # callbacks from base classes, we deduplicate them, then prepend what we have
            # registered here.
            #
            # The order should be:
            # 1. Callbacks are registered in order within the same class
            # 2. Callbacks defined in derived classes precede those defined in base
            #    classes
            callbacks_from_base = []
            for base in bases:
                current_callbacks = getattr(base, temporary_stage.attribute_name, None)
                if not current_callbacks:
                    continue
                callbacks_from_base.extend(current_callbacks)
            callbacks_from_base = list(lang.dedupe(callbacks_from_base))
            # Set the callbacks in this class and flush the temporary stage
            attr_dict[temporary_stage.attribute_name] = staged_callbacks[:] + callbacks_from_base
            del temporary_stage.callbacks[:]

        return super(PhaseCallbacksMeta, mcs).__new__(mcs, name, bases, attr_dict)

    @staticmethod
    def run_after(phase, when=None):
        """Decorator to register a function for running after a given phase.

        Args:
            phase (str): phase after which the function must run.
            when (str): condition under which the function is run (if None, it is always run).
        """

        def _decorator(fn):
            key = (phase, when)
            item = (key, fn)
            _RUN_AFTER.callbacks.append(item)
            return fn

        return _decorator

    @staticmethod
    def run_before(phase, when=None):
        """Decorator to register a function for running before a given phase.

        Args:
            phase (str): phase before which the function must run.
            when (str): condition under which the function is run (if None, it is always run).
        """

        def _decorator(fn):
            key = (phase, when)
            item = (key, fn)
            _RUN_BEFORE.callbacks.append(item)
            return fn

        return _decorator
        return pkg.legacy_buildsystem  # type: ignore

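As context for the callback machinery above, a hedged sketch of how packagers typically use the two decorators; the package name, phase, and variant are illustrative assumptions, not taken from this diff:

    # Hypothetical package file; run_before/run_after come from the package API.
    from spack.package import *


    class Example(Package):
        """Illustrative package using phase callbacks."""

        @run_before("install")
        def check_ready(self):
            # staged in _RUN_BEFORE under the key ("install", None)
            pass

        @run_after("install", when="+tests")
        def post_install_tests(self):
            # staged in _RUN_AFTER; runs only when the concrete spec matches "+tests"
            pass
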
class BuilderMeta(
    PhaseCallbacksMeta,
    spack.phase_callbacks.PhaseCallbacksMeta,
    spack.multimethod.MultiMethodMeta,
    type(collections.abc.Sequence),  # type: ignore
):

@@ -400,8 +301,12 @@ def __new__(mcs, name, bases, attr_dict):
            )

            combine_callbacks = _PackageAdapterMeta.combine_callbacks
            attr_dict[_RUN_BEFORE.attribute_name] = combine_callbacks(_RUN_BEFORE.attribute_name)
            attr_dict[_RUN_AFTER.attribute_name] = combine_callbacks(_RUN_AFTER.attribute_name)
            attr_dict[spack.phase_callbacks._RUN_BEFORE.attribute_name] = combine_callbacks(
                spack.phase_callbacks._RUN_BEFORE.attribute_name
            )
            attr_dict[spack.phase_callbacks._RUN_AFTER.attribute_name] = combine_callbacks(
                spack.phase_callbacks._RUN_AFTER.attribute_name
            )

        return super(_PackageAdapterMeta, mcs).__new__(mcs, name, bases, attr_dict)

@@ -421,8 +326,8 @@ def __init__(self, name, builder):
        self.name = name
        self.builder = builder
        self.phase_fn = self._select_phase_fn()
        self.run_before = self._make_callbacks(_RUN_BEFORE.attribute_name)
        self.run_after = self._make_callbacks(_RUN_AFTER.attribute_name)
        self.run_before = self._make_callbacks(spack.phase_callbacks._RUN_BEFORE.attribute_name)
        self.run_after = self._make_callbacks(spack.phase_callbacks._RUN_AFTER.attribute_name)

    def _make_callbacks(self, callbacks_attribute):
        result = []

@@ -483,15 +388,103 @@ def copy(self):
        return copy.deepcopy(self)

class Builder(collections.abc.Sequence, metaclass=BuilderMeta):
    """A builder is a class that, given a package object (i.e. associated with
    concrete spec), knows how to install it.
class BaseBuilder(metaclass=BuilderMeta):
    """An interface for builders, without any phases defined. This class is exposed in the package
    API, so that packagers can create a single class to define ``setup_build_environment`` and
    ``@run_before`` and ``@run_after`` callbacks that can be shared among different builders.

    The builder behaves like a sequence, and when iterated over returns the
    "phases" of the installation in the correct order.
    Example:

    Args:
        pkg (spack.package_base.PackageBase): package object to be built
    .. code-block:: python

        class AnyBuilder(BaseBuilder):
            @run_after("install")
            def fixup_install(self):
                # do something after the package is installed
                pass

            def setup_build_environment(self, env):
                env.set("MY_ENV_VAR", "my_value")

        class CMakeBuilder(cmake.CMakeBuilder, AnyBuilder):
            pass

        class AutotoolsBuilder(autotools.AutotoolsBuilder, AnyBuilder):
            pass
    """

    def __init__(self, pkg: spack.package_base.PackageBase) -> None:
        self.pkg = pkg

    @property
    def spec(self) -> spack.spec.Spec:
        return self.pkg.spec

    @property
    def stage(self):
        return self.pkg.stage

    @property
    def prefix(self):
        return self.pkg.prefix

    def setup_build_environment(
        self, env: spack.util.environment.EnvironmentModifications
    ) -> None:
        """Sets up the build environment for a package.

        This method will be called before the current package prefix exists in
        Spack's store.

        Args:
            env: environment modifications to be applied when the package is built. Package authors
                can call methods on it to alter the build environment.
        """
        if not hasattr(super(), "setup_build_environment"):
            return
        super().setup_build_environment(env)  # type: ignore

    def setup_dependent_build_environment(
        self, env: spack.util.environment.EnvironmentModifications, dependent_spec: spack.spec.Spec
    ) -> None:
        """Sets up the build environment of a package that depends on this one.

        This is similar to ``setup_build_environment``, but it is used to modify the build
        environment of a package that *depends* on this one.

        This gives packages the ability to set environment variables for the build of the
        dependent, which can be useful to provide search hints for headers or libraries if they are
        not in standard locations.

        This method will be called before the dependent package prefix exists in Spack's store.

        Args:
            env: environment modifications to be applied when the dependent package is built.
                Package authors can call methods on it to alter the build environment.

            dependent_spec: the spec of the dependent package about to be built. This allows the
                extendee (self) to query the dependent's state. Note that *this* package's spec is
                available as ``self.spec``
        """
        if not hasattr(super(), "setup_dependent_build_environment"):
            return
        super().setup_dependent_build_environment(env, dependent_spec)  # type: ignore

    def __repr__(self):
        fmt = "{name}{/hash:7}"
        return f"{self.__class__.__name__}({self.spec.format(fmt)})"

    def __str__(self):
        fmt = "{name}{/hash:7}"
        return f'"{self.__class__.__name__}" builder for "{self.spec.format(fmt)}"'

class Builder(BaseBuilder, collections.abc.Sequence):
    """A builder is a class that, given a package object (i.e. associated with concrete spec),
    knows how to install it.

    The builder behaves like a sequence, and when iterated over returns the "phases" of the
    installation in the correct order.
    """

    #: Sequence of phases. Must be defined in derived classes

@@ -506,95 +499,22 @@ class Builder(collections.abc.Sequence, metaclass=BuilderMeta):
    build_time_test_callbacks: List[str]
    install_time_test_callbacks: List[str]

    #: List of glob expressions. Each expression must either be
    #: absolute or relative to the package source path.
    #: Matching artifacts found at the end of the build process will be
    #: copied in the same directory tree as _spack_build_logfile and
    #: _spack_build_envfile.
    archive_files: List[str] = []
    #: List of glob expressions. Each expression must either be absolute or relative to the package
    #: source path. Matching artifacts found at the end of the build process will be copied in the
    #: same directory tree as _spack_build_logfile and _spack_build_envfile.
    @property
    def archive_files(self) -> List[str]:
        return []

    def __init__(self, pkg):
        self.pkg = pkg
    def __init__(self, pkg: spack.package_base.PackageBase) -> None:
        super().__init__(pkg)
        self.callbacks = {}
        for phase in self.phases:
            self.callbacks[phase] = InstallationPhase(phase, self)

    @property
    def spec(self):
        return self.pkg.spec

    @property
    def stage(self):
        return self.pkg.stage

    @property
    def prefix(self):
        return self.pkg.prefix

    def setup_build_environment(self, env):
        """Sets up the build environment for a package.

        This method will be called before the current package prefix exists in
        Spack's store.

        Args:
            env (spack.util.environment.EnvironmentModifications): environment
                modifications to be applied when the package is built. Package authors
                can call methods on it to alter the build environment.
        """
        if not hasattr(super(), "setup_build_environment"):
            return
        super().setup_build_environment(env)

    def setup_dependent_build_environment(self, env, dependent_spec):
        """Sets up the build environment of packages that depend on this one.

        This is similar to ``setup_build_environment``, but it is used to
        modify the build environments of packages that *depend* on this one.

        This gives packages like Python and others that follow the extension
        model a way to implement common environment or compile-time settings
        for dependencies.

        This method will be called before the dependent package prefix exists
        in Spack's store.

        Examples:
            1. Installing python modules generally requires ``PYTHONPATH``
               to point to the ``lib/pythonX.Y/site-packages`` directory in the
               module's install prefix. This method could be used to set that
               variable.

        Args:
            env (spack.util.environment.EnvironmentModifications): environment
                modifications to be applied when the dependent package is built.
                Package authors can call methods on it to alter the build environment.

            dependent_spec (spack.spec.Spec): the spec of the dependent package
                about to be built. This allows the extendee (self) to query
                the dependent's state. Note that *this* package's spec is
                available as ``self.spec``
        """
        if not hasattr(super(), "setup_dependent_build_environment"):
            return
        super().setup_dependent_build_environment(env, dependent_spec)

    def __getitem__(self, idx):
        key = self.phases[idx]
        return self.callbacks[key]

    def __len__(self):
        return len(self.phases)

    def __repr__(self):
        msg = "{0}({1})"
        return msg.format(type(self).__name__, self.pkg.spec.format("{name}/{hash:7}"))

    def __str__(self):
        msg = '"{0}" builder for "{1}"'
        return msg.format(type(self).build_system, self.pkg.spec.format("{name}/{hash:7}"))


# Export these names as standalone to be used in packages
run_after = PhaseCallbacksMeta.run_after
run_before = PhaseCallbacksMeta.run_before

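A hedged sketch of what a minimal concrete builder on this interface could look like; the class name, phase names, and the legacy-compatibility tuples are assumptions for illustration, not part of this diff:

    from typing import Tuple

    import spack.builder


    class MyBuilder(spack.builder.Builder):
        #: assumed phases; iterating over the builder yields them in this order
        phases = ("configure", "install")

        #: compatibility hooks consumed by the adapter machinery above
        legacy_methods: Tuple[str, ...] = ()
        legacy_attributes: Tuple[str, ...] = ()

        def configure(self, pkg, spec, prefix):
            pass  # each phase name maps to a method selected by InstallationPhase

        def install(self, pkg, spec, prefix):
            pass
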
@@ -32,6 +32,7 @@

import spack
import spack.binary_distribution as bindist
import spack.builder
import spack.concretize
import spack.config as cfg
import spack.error

@@ -1387,7 +1388,11 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->

    stage_dir = job_pkg.stage.path
    tty.debug(f"stage dir: {stage_dir}")
    for file in [job_pkg.log_path, job_pkg.env_mods_path, *job_pkg.builder.archive_files]:
    for file in [
        job_pkg.log_path,
        job_pkg.env_mods_path,
        *spack.builder.create(job_pkg).archive_files,
    ]:
        copy_files_to_artifacts(file, job_log_dir)

@@ -8,7 +8,8 @@
import os
import re
import sys
from typing import List, Union
from collections import Counter
from typing import List, Optional, Union

import llnl.string
import llnl.util.tty as tty

@@ -24,6 +25,7 @@
import spack.extensions
import spack.parser
import spack.paths
import spack.repo
import spack.spec
import spack.store
import spack.traverse as traverse

@@ -31,6 +33,8 @@
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml

from ..enums import InstallRecordStatus

# cmd has a submodule called "list" so preserve the python list module
python_list = list

@@ -189,6 +193,43 @@ def _concretize_spec_pairs(to_concretize, tests=False):
    rules from config."""
    unify = spack.config.get("concretizer:unify", False)

    # Special case for concretizing a single spec
    if len(to_concretize) == 1:
        abstract, concrete = to_concretize[0]
        return [concrete or abstract.concretized()]

    # Special case if every spec is either concrete or has an abstract hash
    if all(
        concrete or abstract.concrete or abstract.abstract_hash
        for abstract, concrete in to_concretize
    ):
        # Get all the concrete specs
        ret = [
            concrete or (abstract if abstract.concrete else abstract.lookup_hash())
            for abstract, concrete in to_concretize
        ]

        # If unify: true, check that specs don't conflict
        # Since all concrete, "when_possible" is not relevant
        if unify is True:  # True, "when_possible", False are possible values
            runtimes = spack.repo.PATH.packages_with_tags("runtime")
            specs_per_name = Counter(
                spec.name
                for spec in traverse.traverse_nodes(
                    ret, deptype=("link", "run"), key=traverse.by_dag_hash
                )
                if spec.name not in runtimes  # runtimes are allowed multiple times
            )

            conflicts = sorted(name for name, count in specs_per_name.items() if count > 1)
            if conflicts:
                raise spack.error.SpecError(
                    "Specs conflict and `concretizer:unify` is configured true.",
                    f" specs depend on multiple versions of {', '.join(conflicts)}",
                )
        return ret

    # Standard case
    concretize_method = spack.concretize.concretize_separately  # unify: false
    if unify is True:
        concretize_method = spack.concretize.concretize_together

@@ -228,39 +269,48 @@ def matching_specs_from_env(specs):
    return _concretize_spec_pairs(spec_pairs + additional_concrete_specs)[: len(spec_pairs)]


def disambiguate_spec(spec, env, local=False, installed=True, first=False):
def disambiguate_spec(
    spec: spack.spec.Spec,
    env: Optional[ev.Environment],
    local: bool = False,
    installed: Union[bool, InstallRecordStatus] = True,
    first: bool = False,
) -> spack.spec.Spec:
    """Given a spec, figure out which installed package it refers to.

    Arguments:
        spec (spack.spec.Spec): a spec to disambiguate
        env (spack.environment.Environment): a spack environment,
            if one is active, or None if no environment is active
        local (bool): do not search chained spack instances
        installed (bool or spack.database.InstallStatus or typing.Iterable):
            install status argument passed to database query.
            See ``spack.database.Database._query`` for details.
    Args:
        spec: a spec to disambiguate
        env: a spack environment, if one is active, or None if no environment is active
        local: do not search chained spack instances
        installed: install status argument passed to database query.
        first: returns the first matching spec, even if more than one match is found
    """
    hashes = env.all_hashes() if env else None
    return disambiguate_spec_from_hashes(spec, hashes, local, installed, first)


def disambiguate_spec_from_hashes(spec, hashes, local=False, installed=True, first=False):
def disambiguate_spec_from_hashes(
    spec: spack.spec.Spec,
    hashes: List[str],
    local: bool = False,
    installed: Union[bool, InstallRecordStatus] = True,
    first: bool = False,
) -> spack.spec.Spec:
    """Given a spec and a list of hashes, get the concrete spec the spec refers to.

    Arguments:
        spec (spack.spec.Spec): a spec to disambiguate
        hashes (typing.Iterable): a set of hashes of specs among which to disambiguate
        local (bool): do not search chained spack instances
        installed (bool or spack.database.InstallStatus or typing.Iterable):
            install status argument passed to database query.
            See ``spack.database.Database._query`` for details.
        spec: a spec to disambiguate
        hashes: a set of hashes of specs among which to disambiguate
        local: if True, do not search chained spack instances
        installed: install status argument passed to database query.
        first: returns the first matching spec, even if more than one match is found
    """
    if local:
        matching_specs = spack.store.STORE.db.query_local(spec, hashes=hashes, installed=installed)
    else:
        matching_specs = spack.store.STORE.db.query(spec, hashes=hashes, installed=installed)
    if not matching_specs:
        tty.die("Spec '%s' matches no installed packages." % spec)
        tty.die(f"Spec '{spec}' matches no installed packages.")

    elif first:
        return matching_specs[0]

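A hedged usage sketch of the retyped helper above; the package name is illustrative:

    import spack.cmd
    import spack.environment as ev
    import spack.spec

    # Resolve "zlib" to a single installed spec; errors with the list of
    # matches if the name is ambiguous and first=False.
    concrete = spack.cmd.disambiguate_spec(
        spack.spec.Spec("zlib"),
        ev.active_environment(),  # None when no environment is active
        local=True,  # restrict the query to this instance's database
    )
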
@@ -34,6 +34,8 @@
from spack.cmd.common import arguments
from spack.spec import Spec, save_dependency_specfiles

from ..enums import InstallRecordStatus

description = "create, download and install binary packages"
section = "packaging"
level = "long"

@@ -308,7 +310,10 @@ def setup_parser(subparser: argparse.ArgumentParser):

def _matching_specs(specs: List[Spec]) -> List[Spec]:
    """Disambiguate specs and return a list of matching specs"""
    return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs]
    return [
        spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=InstallRecordStatus.ANY)
        for s in specs
    ]


def _format_spec(spec: Spec) -> str:

@@ -23,9 +23,10 @@
import spack.installer
import spack.store
from spack.cmd.common import arguments
from spack.database import InstallStatuses
from spack.error import SpackError

from ..enums import InstallRecordStatus

description = "replace one package with another via symlinks"
section = "admin"
level = "long"

@@ -95,8 +96,12 @@ def deprecate(parser, args):
    if len(specs) != 2:
        raise SpackError("spack deprecate requires exactly two specs")

    install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]
    deprecate = spack.cmd.disambiguate_spec(specs[0], env, local=True, installed=install_query)
    deprecate = spack.cmd.disambiguate_spec(
        specs[0],
        env,
        local=True,
        installed=(InstallRecordStatus.INSTALLED | InstallRecordStatus.DEPRECATED),
    )

    if args.install:
        deprecator = specs[1].concretized()

@@ -17,7 +17,8 @@
import spack.spec
import spack.store
from spack.cmd.common import arguments
from spack.database import InstallStatuses

from ..enums import InstallRecordStatus

description = "list and search installed packages"
section = "basic"

@@ -137,21 +138,22 @@ def setup_parser(subparser):
    subparser.add_argument(
        "--loaded", action="store_true", help="show only packages loaded in the user environment"
    )
    subparser.add_argument(
    only_missing_or_deprecated = subparser.add_mutually_exclusive_group()
    only_missing_or_deprecated.add_argument(
        "-M",
        "--only-missing",
        action="store_true",
        dest="only_missing",
        help="show only missing dependencies",
    )
    only_missing_or_deprecated.add_argument(
        "--only-deprecated", action="store_true", help="show only deprecated packages"
    )
    subparser.add_argument(
        "--deprecated",
        action="store_true",
        help="show deprecated packages as well as installed specs",
    )
    subparser.add_argument(
        "--only-deprecated", action="store_true", help="show only deprecated packages"
    )
    subparser.add_argument(
        "--install-tree",
        action="store",

@@ -165,14 +167,23 @@ def setup_parser(subparser):


def query_arguments(args):
    # Set up query arguments.
    installed = []
    if not (args.only_missing or args.only_deprecated):
        installed.append(InstallStatuses.INSTALLED)
    if (args.deprecated or args.only_deprecated) and not args.only_missing:
        installed.append(InstallStatuses.DEPRECATED)
    if (args.missing or args.only_missing) and not args.only_deprecated:
        installed.append(InstallStatuses.MISSING)
    if args.only_missing and (args.deprecated or args.missing):
        raise RuntimeError("cannot use --only-missing with --deprecated, or --missing")

    if args.only_deprecated and (args.deprecated or args.missing):
        raise RuntimeError("cannot use --only-deprecated with --deprecated, or --missing")

    installed = InstallRecordStatus.INSTALLED
    if args.only_missing:
        installed = InstallRecordStatus.MISSING
    elif args.only_deprecated:
        installed = InstallRecordStatus.DEPRECATED

    if args.missing:
        installed |= InstallRecordStatus.MISSING

    if args.deprecated:
        installed |= InstallRecordStatus.DEPRECATED

    predicate_fn = None
    if args.unknown:

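The rewrite above replaces status lists with a combinable flag value; a short sketch of the composition semantics, assuming InstallRecordStatus is a standard enum.Flag as its use with `|` and `|=` suggests:

    from spack.enums import InstallRecordStatus

    installed = InstallRecordStatus.INSTALLED
    installed |= InstallRecordStatus.DEPRECATED  # --deprecated adds a flag bit

    assert InstallRecordStatus.DEPRECATED in installed
    assert InstallRecordStatus.MISSING not in installed
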
@@ -78,8 +78,8 @@
    boxlib @B{dim=2}            boxlib built for 2 dimensions
    libdwarf @g{%intel} ^libelf@g{%gcc}
        libdwarf, built with intel compiler, linked to libelf built with gcc
    mvapich2 @g{%pgi} @B{fabrics=psm,mrail,sock}
        mvapich2, built with pgi compiler, with support for multiple fabrics
    mvapich2 @g{%gcc} @B{fabrics=psm,mrail,sock}
        mvapich2, built with gcc compiler, with support for multiple fabrics
"""

@@ -11,6 +11,7 @@
import llnl.util.tty.color as color
from llnl.util.tty.colify import colify

import spack.builder
import spack.deptypes as dt
import spack.fetch_strategy as fs
import spack.install_test

@@ -202,11 +203,13 @@ def print_namespace(pkg, args):
def print_phases(pkg, args):
    """output installation phases"""

    if hasattr(pkg.builder, "phases") and pkg.builder.phases:
    builder = spack.builder.create(pkg)

    if hasattr(builder, "phases") and builder.phases:
        color.cprint("")
        color.cprint(section_title("Installation Phases:"))
        phase_str = ""
        for phase in pkg.builder.phases:
        for phase in builder.phases:
            phase_str += "    {0}".format(phase)
        color.cprint(phase_str)

@@ -10,7 +10,8 @@
import spack.cmd
import spack.store
from spack.cmd.common import arguments
from spack.database import InstallStatuses

from ..enums import InstallRecordStatus

description = "mark packages as explicitly or implicitly installed"
section = "admin"

@@ -67,8 +68,7 @@ def find_matching_specs(specs, allow_multiple_matches=False):
    has_errors = False

    for spec in specs:
        install_query = [InstallStatuses.INSTALLED]
        matching = spack.store.STORE.db.query_local(spec, installed=install_query)
        matching = spack.store.STORE.db.query_local(spec, installed=InstallRecordStatus.INSTALLED)
        # For each spec provided, make sure it refers to only one package.
        # Fail and ask user to be unambiguous if it doesn't
        if not allow_multiple_matches and len(matching) > 1:

@@ -8,6 +8,7 @@
import spack.cmd.common.arguments
import spack.cmd.modules
import spack.config
import spack.modules
import spack.modules.lmod

@@ -7,6 +7,7 @@
import spack.cmd.common.arguments
import spack.cmd.modules
import spack.config
import spack.modules
import spack.modules.tcl

@@ -82,14 +82,6 @@ def spec(parser, args):
    if args.namespaces:
        fmt = "{namespace}." + fmt

    tree_kwargs = {
        "cover": args.cover,
        "format": fmt,
        "hashlen": None if args.very_long else 7,
        "show_types": args.types,
        "status_fn": install_status_fn if args.install_status else None,
    }

    # use a read transaction if we are getting install status for every
    # spec in the DAG. This avoids repeatedly querying the DB.
    tree_context = lang.nullcontext

@@ -99,46 +91,35 @@ def spec(parser, args):
    env = ev.active_environment()

    if args.specs:
        input_specs = spack.cmd.parse_specs(args.specs)
        concretized_specs = spack.cmd.parse_specs(args.specs, concretize=True)
        specs = list(zip(input_specs, concretized_specs))
        concrete_specs = spack.cmd.parse_specs(args.specs, concretize=True)
    elif env:
        env.concretize()
        specs = env.concretized_specs()

        if not args.format:
            # environments are printed together in a combined tree() invocation,
            # except when using --yaml or --json, which we print spec by spec below.
            tree_kwargs["key"] = spack.traverse.by_dag_hash
            tree_kwargs["hashes"] = args.long or args.very_long
            print(spack.spec.tree([concrete for _, concrete in specs], **tree_kwargs))
            return
        concrete_specs = env.concrete_roots()
    else:
        tty.die("spack spec requires at least one spec or an active environment")

    for input, output in specs:
        # With --yaml or --json, just print the raw specs to output
        if args.format:
    # With --yaml, --json, or --format, just print the raw specs to output
    if args.format:
        for spec in concrete_specs:
            if args.format == "yaml":
                # use write because to_yaml already has a newline.
                sys.stdout.write(output.to_yaml(hash=ht.dag_hash))
                sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
            elif args.format == "json":
                print(output.to_json(hash=ht.dag_hash))
                print(spec.to_json(hash=ht.dag_hash))
            else:
                print(output.format(args.format))
            continue
                print(spec.format(args.format))
        return

        with tree_context():
            # Only show the headers for input specs that are not concrete to avoid
            # repeated output. This happens because parse_specs outputs concrete
            # specs for `/hash` inputs.
            if not input.concrete:
                tree_kwargs["hashes"] = False  # Always False for input spec
                print("Input spec")
                print("--------------------------------")
                print(input.tree(**tree_kwargs))
                print("Concretized")
                print("--------------------------------")

            tree_kwargs["hashes"] = args.long or args.very_long
            print(output.tree(**tree_kwargs))
    with tree_context():
        print(
            spack.spec.tree(
                concrete_specs,
                cover=args.cover,
                format=fmt,
                hashlen=None if args.very_long else 7,
                show_types=args.types,
                status_fn=install_status_fn if args.install_status else None,
                hashes=args.long or args.very_long,
                key=spack.traverse.by_dag_hash,
            )
        )

@@ -3,18 +3,21 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import ast
import os
import re
import sys
from itertools import zip_longest
from typing import Dict, List, Optional

import llnl.util.tty as tty
import llnl.util.tty.color as color
from llnl.util.filesystem import working_dir

import spack.paths
import spack.repo
import spack.util.git
from spack.util.executable import which
from spack.util.executable import Executable, which

description = "runs source code style checks on spack"
section = "developer"

@@ -36,10 +39,7 @@ def grouper(iterable, n, fillvalue=None):
#: double-check the results of other tools (if, e.g., --fix was provided)
#: The list maps an executable name to a method to ensure the tool is
#: bootstrapped or present in the environment.
tool_names = ["isort", "black", "flake8", "mypy"]

#: tools we run in spack style
tools = {}
tool_names = ["import", "isort", "black", "flake8", "mypy"]

#: warnings to ignore in mypy
mypy_ignores = [

@@ -61,14 +61,28 @@ def is_package(f):

#: decorator for adding tools to the list
class tool:
    def __init__(self, name, required=False):
    def __init__(self, name: str, required: bool = False, external: bool = True) -> None:
        self.name = name
        self.external = external
        self.required = required

    def __call__(self, fun):
        tools[self.name] = (fun, self.required)
        self.fun = fun
        tools[self.name] = self
        return fun

    @property
    def installed(self) -> bool:
        return bool(which(self.name)) if self.external else True

    @property
    def executable(self) -> Optional[Executable]:
        return which(self.name) if self.external else None


#: tools we run in spack style
tools: Dict[str, tool] = {}

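A hedged sketch of registering a check with the reworked decorator; the function body is illustrative, not part of this diff:

    @tool("flake8", required=True)
    def run_flake8(flake8_cmd, file_list, args):
        # flake8_cmd is tool.executable: an Executable for external tools,
        # or None for internal checks registered with external=False.
        return 0

    # tools["flake8"] now holds the `tool` instance itself, so the style
    # driver can query tools["flake8"].installed before running it.
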
def changed_files(base="develop", untracked=True, all_files=False, root=None):
    """Get list of changed files in the Spack repository.

@@ -176,22 +190,22 @@ def setup_parser(subparser):
        "-t",
        "--tool",
        action="append",
        help="specify which tools to run (default: %s)" % ",".join(tool_names),
        help="specify which tools to run (default: %s)" % ", ".join(tool_names),
    )
    tool_group.add_argument(
        "-s",
        "--skip",
        metavar="TOOL",
        action="append",
        help="specify tools to skip (choose from %s)" % ",".join(tool_names),
        help="specify tools to skip (choose from %s)" % ", ".join(tool_names),
    )

    subparser.add_argument("files", nargs=argparse.REMAINDER, help="specific files to check")


def cwd_relative(path, args):
def cwd_relative(path, root, initial_working_dir):
    """Translate prefix-relative path to current working directory-relative."""
    return os.path.relpath(os.path.join(args.root, path), args.initial_working_dir)
    return os.path.relpath(os.path.join(root, path), initial_working_dir)


def rewrite_and_print_output(

@@ -201,7 +215,10 @@ def rewrite_and_print_output(

    # print results relative to current working directory
    def translate(match):
        return replacement.format(cwd_relative(match.group(1), args), *list(match.groups()[1:]))
        return replacement.format(
            cwd_relative(match.group(1), args.root, args.initial_working_dir),
            *list(match.groups()[1:]),
        )

    for line in output.split("\n"):
        if not line:

@@ -220,7 +237,7 @@ def print_style_header(file_list, args, tools_to_run):
    # translate modified paths to cwd_relative if needed
    paths = [filename.strip() for filename in file_list]
    if not args.root_relative:
        paths = [cwd_relative(filename, args) for filename in paths]
        paths = [cwd_relative(filename, args.root, args.initial_working_dir) for filename in paths]

    tty.msg("Modified files", *paths)
    sys.stdout.flush()

@@ -352,17 +369,137 @@ def run_black(black_cmd, file_list, args):
    return returncode


def _module_part(root: str, expr: str):
    parts = expr.split(".")
    # spack.pkg is for repositories, don't try to resolve it here.
    if ".".join(parts[:2]) == spack.repo.ROOT_PYTHON_NAMESPACE:
        return None
    while parts:
        f1 = os.path.join(root, "lib", "spack", *parts) + ".py"
        f2 = os.path.join(root, "lib", "spack", *parts, "__init__.py")

        if (
            os.path.exists(f1)
            # ensure case sensitive match
            and f"{parts[-1]}.py" in os.listdir(os.path.dirname(f1))
            or os.path.exists(f2)
        ):
            return ".".join(parts)
        parts.pop()
    return None


def _run_import_check(
    file_list: List[str],
    *,
    fix: bool,
    root_relative: bool,
    root=spack.paths.prefix,
    working_dir=spack.paths.prefix,
    out=sys.stdout,
):
    if sys.version_info < (3, 9):
        print("import check requires Python 3.9 or later")
        return 0

    is_use = re.compile(r"(?<!from )(?<!import )(?:llnl|spack)\.[a-zA-Z0-9_\.]+")

    # redundant imports followed by a `# comment` are ignored, because there can be a legitimate
    # reason to import a module: to execute module scope init code, or to deal with circular
    # imports.
    is_abs_import = re.compile(r"^import ((?:llnl|spack)\.[a-zA-Z0-9_\.]+)$", re.MULTILINE)

    exit_code = 0

    for file in file_list:
        to_add = set()
        to_remove = []

        pretty_path = file if root_relative else cwd_relative(file, root, working_dir)

        try:
            with open(file, "r") as f:
                contents = f.read()
            parsed = ast.parse(contents)
        except Exception:
            exit_code = 1
            print(f"{pretty_path}: could not parse", file=out)
            continue

        for m in is_abs_import.finditer(contents):
            if contents.count(m.group(1)) == 1:
                to_remove.append(m.group(0))
                exit_code = 1
                print(f"{pretty_path}: redundant import: {m.group(1)}", file=out)

        # Clear all strings to avoid matching comments/strings etc.
        for node in ast.walk(parsed):
            if isinstance(node, ast.Constant) and isinstance(node.value, str):
                node.value = ""

        filtered_contents = ast.unparse(parsed)  # novermin
        for m in is_use.finditer(filtered_contents):
            module = _module_part(root, m.group(0))
            if not module or module in to_add:
                continue
            if re.search(rf"import {re.escape(module)}\b(?!\.)", contents):
                continue
            to_add.add(module)
            exit_code = 1
            print(f"{pretty_path}: missing import: {module} ({m.group(0)})", file=out)

        if not fix or (not to_add and not to_remove):
            continue

        with open(file, "r") as f:
            lines = f.readlines()

        if to_add:
            # insert missing imports before the first import, delegate ordering to isort
            for node in parsed.body:
                if isinstance(node, (ast.Import, ast.ImportFrom)):
                    first_line = node.lineno
                    break
            else:
                print(f"{pretty_path}: could not fix", file=out)
                continue
            lines.insert(first_line, "\n".join(f"import {x}" for x in to_add) + "\n")

        new_contents = "".join(lines)

        # remove redundant imports
        for statement in to_remove:
            new_contents = new_contents.replace(f"{statement}\n", "")

        with open(file, "w") as f:
            f.write(new_contents)

    return exit_code


@tool("import", external=False)
def run_import_check(import_check_cmd, file_list, args):
    exit_code = _run_import_check(
        file_list,
        fix=args.fix,
        root_relative=args.root_relative,
        root=args.root,
        working_dir=args.initial_working_dir,
    )
    print_tool_result("import", exit_code)
    return exit_code

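A hedged sketch of invoking the new import check directly; the file path is illustrative, and a nonzero exit code flags a missing or redundant absolute import:

    import sys

    exit_code = _run_import_check(
        ["lib/spack/spack/cmd/style.py"],  # hypothetical target file
        fix=False,  # report only; fix=True rewrites the file in place
        root_relative=False,
        out=sys.stdout,
    )
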
def validate_toolset(arg_value):
    """Validate --tool and --skip arguments (sets of optionally comma-separated tools)."""
    tools = set(",".join(arg_value).split(","))  # allow args like 'isort,flake8'
    for tool in tools:
        if tool not in tool_names:
            tty.die("Invaild tool: '%s'" % tool, "Choose from: %s" % ", ".join(tool_names))
            tty.die("Invalid tool: '%s'" % tool, "Choose from: %s" % ", ".join(tool_names))
    return tools


def missing_tools(tools_to_run):
    return [t for t in tools_to_run if which(t) is None]
def missing_tools(tools_to_run: List[str]) -> List[str]:
    return [t for t in tools_to_run if not tools[t].installed]


def _bootstrap_dev_dependencies():

@@ -417,9 +554,9 @@ def prefix_relative(path):

    print_style_header(file_list, args, tools_to_run)
    for tool_name in tools_to_run:
        run_function, required = tools[tool_name]
        tool = tools[tool_name]
        print_tool_header(tool_name)
        return_code |= run_function(which(tool_name), file_list, args)
        return_code |= tool.fun(tool.executable, file_list, args)

    if return_code == 0:
        tty.msg(color.colorize("@*{spack style checks were clean}"))

@@ -24,7 +24,7 @@


# tutorial configuration parameters
tutorial_branch = "releases/v0.22"
tutorial_branch = "releases/v0.23"
tutorial_mirror = "file:///mirror"
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")

@@ -17,7 +17,8 @@
import spack.store
import spack.traverse as traverse
from spack.cmd.common import arguments
from spack.database import InstallStatuses

from ..enums import InstallRecordStatus

description = "remove installed packages"
section = "build"

@@ -99,12 +100,14 @@ def find_matching_specs(
    hashes = env.all_hashes() if env else None

    # List of specs that match expressions given via command line
    specs_from_cli: List["spack.spec.Spec"] = []
    specs_from_cli: List[spack.spec.Spec] = []
    has_errors = False
    for spec in specs:
        install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]
        matching = spack.store.STORE.db.query_local(
            spec, hashes=hashes, installed=install_query, origin=origin
            spec,
            hashes=hashes,
            installed=(InstallRecordStatus.INSTALLED | InstallRecordStatus.DEPRECATED),
            origin=origin,
        )
        # For each spec provided, make sure it refers to only one package.
        # Fail and ask user to be unambiguous if it doesn't

@@ -16,7 +16,6 @@
    ("gfortran", os.path.join("clang", "gfortran")),
    ("xlf_r", os.path.join("xl_r", "xlf_r")),
    ("xlf", os.path.join("xl", "xlf")),
    ("pgfortran", os.path.join("pgi", "pgfortran")),
    ("ifort", os.path.join("intel", "ifort")),
]

@@ -25,7 +24,6 @@
    ("gfortran", os.path.join("clang", "gfortran")),
    ("xlf90_r", os.path.join("xl_r", "xlf90_r")),
    ("xlf90", os.path.join("xl", "xlf90")),
    ("pgfortran", os.path.join("pgi", "pgfortran")),
    ("ifort", os.path.join("intel", "ifort")),
]

@@ -124,8 +124,8 @@ def setup_custom_environment(self, pkg, env):
        # Edge cases for Intel's oneAPI compilers when using the legacy classic compilers:
        # Always pass flags to disable deprecation warnings, since these warnings can
        # confuse tools that parse the output of compiler commands (e.g. version checks).
        if self.real_version >= Version("2021") and self.real_version <= Version("2023"):
        if self.real_version >= Version("2021") and self.real_version < Version("2024"):
            env.append_flags("SPACK_ALWAYS_CFLAGS", "-diag-disable=10441")
            env.append_flags("SPACK_ALWAYS_CXXFLAGS", "-diag-disable=10441")
        if self.real_version >= Version("2021") and self.real_version <= Version("2024"):
        if self.real_version >= Version("2021") and self.real_version < Version("2025"):
            env.append_flags("SPACK_ALWAYS_FFLAGS", "-diag-disable=10448")

@@ -155,10 +155,10 @@ def setup_custom_environment(self, pkg, env):
        # icx+icpx+ifx or icx+icpx+ifort. But to be on the safe side (some users may
        # want to try to swap icpx against icpc, for example), and since the Intel LLVM
        # compilers accept these diag-disable flags, we apply them for all compilers.
        if self.real_version >= Version("2021") and self.real_version <= Version("2023"):
        if self.real_version >= Version("2021") and self.real_version < Version("2024"):
            env.append_flags("SPACK_ALWAYS_CFLAGS", "-diag-disable=10441")
            env.append_flags("SPACK_ALWAYS_CXXFLAGS", "-diag-disable=10441")
        if self.real_version >= Version("2021") and self.real_version <= Version("2024"):
        if self.real_version >= Version("2021") and self.real_version < Version("2025"):
            env.append_flags("SPACK_ALWAYS_FFLAGS", "-diag-disable=10448")

        # 2024 release bumped the libsycl version because of an ABI

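The bound change above presumably matters because, in Spack's version ordering, patch releases of 2023 sort after plain 2023, so the old inclusive upper bound skipped them; a sketch, assuming standard spack.version comparison semantics:

    from spack.version import Version

    v = Version("2023.2.1")
    print(v <= Version("2023"))  # False: 2023.2.1 > 2023, so the old bound missed it
    print(v < Version("2024"))   # True: the exclusive bound covers the whole 2023 series
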
@@ -1,77 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

from spack.compiler import Compiler, UnsupportedCompilerFlag
from spack.version import Version


class Pgi(Compiler):
    # Named wrapper links within build_env_path
    link_paths = {
        "cc": os.path.join("pgi", "pgcc"),
        "cxx": os.path.join("pgi", "pgc++"),
        "f77": os.path.join("pgi", "pgfortran"),
        "fc": os.path.join("pgi", "pgfortran"),
    }

    version_argument = "-V"
    ignore_version_errors = [2]  # `pgcc -V` on PowerPC annoyingly returns 2
    version_regex = r"pg[^ ]* ([0-9.]+)-[0-9]+ (LLVM )?[^ ]+ target on "

    @property
    def verbose_flag(self):
        return "-v"

    @property
    def debug_flags(self):
        return ["-g", "-gopt"]

    @property
    def opt_flags(self):
        return ["-O", "-O0", "-O1", "-O2", "-O3", "-O4"]

    @property
    def openmp_flag(self):
        return "-mp"

    @property
    def cxx11_flag(self):
        return "-std=c++11"

    @property
    def cc_pic_flag(self):
        return "-fpic"

    @property
    def cxx_pic_flag(self):
        return "-fpic"

    @property
    def f77_pic_flag(self):
        return "-fpic"

    @property
    def fc_pic_flag(self):
        return "-fpic"

    required_libs = ["libpgc", "libpgf90"]

    @property
    def c99_flag(self):
        if self.real_version >= Version("12.10"):
            return "-c99"
        raise UnsupportedCompilerFlag(self, "the C99 standard", "c99_flag", "< 12.10")

    @property
    def c11_flag(self):
        if self.real_version >= Version("15.3"):
            return "-c11"
        raise UnsupportedCompilerFlag(self, "the C11 standard", "c11_flag", "< 15.3")

    @property
    def stdcxx_libs(self):
        return ("-pgc++libs",)

@@ -160,6 +160,11 @@ def concretize_separately(
    # TODO: support parallel concretization on macOS and Windows
    num_procs = min(len(args), spack.config.determine_number_of_jobs(parallel=True))

    msg = "Starting concretization"
    if sys.platform not in ("darwin", "win32") and num_procs > 1:
        msg += f" pool with {num_procs} processes"
    tty.msg(msg)

    for j, (i, concrete, duration) in enumerate(
        spack.util.parallel.imap_unordered(
            _concretize_task, args, processes=num_procs, debug=tty.is_debug(), maxtaskperchild=1

@@ -69,6 +69,8 @@
from spack.error import SpackError
from spack.util.crypto import bit_length

from .enums import InstallRecordStatus

# TODO: Provide an API automatically retrying a build after detecting and
# TODO: clearing a failure.

@@ -160,36 +162,12 @@ def converter(self, spec_like, *args, **kwargs):
    return converter


class InstallStatus(str):
    pass


class InstallStatuses:
    INSTALLED = InstallStatus("installed")
    DEPRECATED = InstallStatus("deprecated")
    MISSING = InstallStatus("missing")

    @classmethod
    def canonicalize(cls, query_arg):
        if query_arg is True:
            return [cls.INSTALLED]
        if query_arg is False:
            return [cls.MISSING]
        if query_arg is any:
            return [cls.INSTALLED, cls.DEPRECATED, cls.MISSING]
        if isinstance(query_arg, InstallStatus):
            return [query_arg]
        try:
            statuses = list(query_arg)
            if all(isinstance(x, InstallStatus) for x in statuses):
                return statuses
        except TypeError:
            pass

        raise TypeError(
            "installation query must be `any`, boolean, "
            "InstallStatus, or iterable of InstallStatus"
        )
def normalize_query(installed: Union[bool, InstallRecordStatus]) -> InstallRecordStatus:
    if installed is True:
        installed = InstallRecordStatus.INSTALLED
    elif installed is False:
        installed = InstallRecordStatus.MISSING
    return installed

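A hedged illustration of normalize_query semantics: booleans collapse to the single-status flags, while flag values pass through unchanged:

    assert normalize_query(True) is InstallRecordStatus.INSTALLED
    assert normalize_query(False) is InstallRecordStatus.MISSING

    combined = InstallRecordStatus.INSTALLED | InstallRecordStatus.DEPRECATED
    assert normalize_query(combined) is combined  # already a flag, returned as-is
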
class InstallRecord:

@@ -227,8 +205,8 @@ def __init__(
        installation_time: Optional[float] = None,
        deprecated_for: Optional[str] = None,
        in_buildcache: bool = False,
        origin=None,
    ):
        origin: Optional[str] = None,
    ) -> None:
        self.spec = spec
        self.path = str(path) if path else None
        self.installed = bool(installed)

@@ -239,14 +217,12 @@ def __init__(
        self.in_buildcache = in_buildcache
        self.origin = origin

    def install_type_matches(self, installed):
        installed = InstallStatuses.canonicalize(installed)
    def install_type_matches(self, installed: InstallRecordStatus) -> bool:
        if self.installed:
            return InstallStatuses.INSTALLED in installed
            return InstallRecordStatus.INSTALLED in installed
        elif self.deprecated_for:
            return InstallStatuses.DEPRECATED in installed
        else:
            return InstallStatuses.MISSING in installed
            return InstallRecordStatus.DEPRECATED in installed
        return InstallRecordStatus.MISSING in installed

    def to_dict(self, include_fields=DEFAULT_INSTALL_RECORD_FIELDS):
        rec_dict = {}

@@ -1396,7 +1372,13 @@ def installed_extensions_for(self, extendee_spec: "spack.spec.Spec"):
        if spec.package.extends(extendee_spec):
            yield spec.package

    def _get_by_hash_local(self, dag_hash, default=None, installed=any):
    def _get_by_hash_local(
        self,
        dag_hash: str,
        default: Optional[List["spack.spec.Spec"]] = None,
        installed: Union[bool, InstallRecordStatus] = InstallRecordStatus.ANY,
    ) -> Optional[List["spack.spec.Spec"]]:
        installed = normalize_query(installed)
        # hash is a full hash and is in the data somewhere
        if dag_hash in self._data:
            rec = self._data[dag_hash]

@@ -1405,8 +1387,7 @@ def _get_by_hash_local(self, dag_hash, default=None, installed=any):
        else:
            return default

        # check if hash is a prefix of some installed (or previously
        # installed) spec.
        # check if hash is a prefix of some installed (or previously installed) spec.
        matches = [
            record.spec
            for h, record in self._data.items()

@@ -1418,52 +1399,43 @@ def _get_by_hash_local(self, dag_hash, default=None, installed=any):
        # nothing found
        return default

    def get_by_hash_local(self, dag_hash, default=None, installed=any):
    def get_by_hash_local(
        self,
        dag_hash: str,
        default: Optional[List["spack.spec.Spec"]] = None,
        installed: Union[bool, InstallRecordStatus] = InstallRecordStatus.ANY,
    ) -> Optional[List["spack.spec.Spec"]]:
        """Look up a spec in *this DB* by DAG hash, or by a DAG hash prefix.

        Arguments:
            dag_hash (str): hash (or hash prefix) to look up
            default (object or None): default value to return if dag_hash is
                not in the DB (default: None)
            installed (bool or InstallStatus or typing.Iterable or None):
                if ``True``, includes only installed
                specs in the search; if ``False`` only missing specs, and if
                ``any``, all specs in database. If an InstallStatus or iterable
                of InstallStatus, returns specs whose install status
                (installed, deprecated, or missing) matches (one of) the
                InstallStatus. (default: any)
        Args:
            dag_hash: hash (or hash prefix) to look up
            default: default value to return if dag_hash is not in the DB
            installed: if ``True``, includes only installed specs in the search; if ``False``
                only missing specs. Otherwise, an InstallRecordStatus flag.

        ``installed`` defaults to ``any`` so that we can refer to any
        known hash. Note that ``query()`` and ``query_one()`` differ in
        that they only return installed specs by default.

        Returns:
            (list): a list of specs matching the hash or hash prefix
        ``installed`` defaults to ``InstallRecordStatus.ANY`` so we can refer to any known hash.

        ``query()`` and ``query_one()`` differ in that they only return installed specs by default.
        """
        with self.read_transaction():
            return self._get_by_hash_local(dag_hash, default=default, installed=installed)

    def get_by_hash(self, dag_hash, default=None, installed=any):
    def get_by_hash(
        self,
        dag_hash: str,
        default: Optional[List["spack.spec.Spec"]] = None,
        installed: Union[bool, InstallRecordStatus] = InstallRecordStatus.ANY,
    ) -> Optional[List["spack.spec.Spec"]]:
        """Look up a spec by DAG hash, or by a DAG hash prefix.

        Arguments:
            dag_hash (str): hash (or hash prefix) to look up
            default (object or None): default value to return if dag_hash is
                not in the DB (default: None)
            installed (bool or InstallStatus or typing.Iterable or None):
                if ``True``, includes only installed specs in the search; if ``False``
                only missing specs, and if ``any``, all specs in database. If an
                InstallStatus or iterable of InstallStatus, returns specs whose install
                status (installed, deprecated, or missing) matches (one of) the
                InstallStatus. (default: any)
        Args:
            dag_hash: hash (or hash prefix) to look up
            default: default value to return if dag_hash is not in the DB
            installed: if ``True``, includes only installed specs in the search; if ``False``
                only missing specs. Otherwise, an InstallRecordStatus flag.

        ``installed`` defaults to ``any`` so that we can refer to any
        known hash. Note that ``query()`` and ``query_one()`` differ in
        that they only return installed specs by default.

        Returns:
            (list): a list of specs matching the hash or hash prefix
        ``installed`` defaults to ``InstallRecordStatus.ANY`` so we can refer to any known hash.
        ``query()`` and ``query_one()`` differ in that they only return installed specs by default.

        """

@@ -1483,7 +1455,7 @@ def _query(
        query_spec: Optional[Union[str, "spack.spec.Spec"]] = None,
        *,
        predicate_fn: Optional[SelectType] = None,
        installed: Union[bool, InstallStatus, List[InstallStatus]] = True,
        installed: Union[bool, InstallRecordStatus] = True,
        explicit: Optional[bool] = None,
        start_date: Optional[datetime.datetime] = None,
        end_date: Optional[datetime.datetime] = None,

@@ -1491,6 +1463,7 @@ def _query(
        in_buildcache: Optional[bool] = None,
        origin: Optional[str] = None,
    ) -> List["spack.spec.Spec"]:
        installed = normalize_query(installed)

        # Restrict the set of records over which we iterate first
        matching_hashes = self._data

@@ -1560,7 +1533,7 @@ def query_local(
        query_spec: Optional[Union[str, "spack.spec.Spec"]] = None,
        *,
        predicate_fn: Optional[SelectType] = None,
        installed: Union[bool, InstallStatus, List[InstallStatus]] = True,
        installed: Union[bool, InstallRecordStatus] = True,
        explicit: Optional[bool] = None,
        start_date: Optional[datetime.datetime] = None,
        end_date: Optional[datetime.datetime] = None,

@@ -1620,7 +1593,7 @@ def query(
        query_spec: Optional[Union[str, "spack.spec.Spec"]] = None,
        *,
        predicate_fn: Optional[SelectType] = None,
        installed: Union[bool, InstallStatus, List[InstallStatus]] = True,
        installed: Union[bool, InstallRecordStatus] = True,
        explicit: Optional[bool] = None,
        start_date: Optional[datetime.datetime] = None,
        end_date: Optional[datetime.datetime] = None,

@@ -1628,7 +1601,7 @@ def query(
        hashes: Optional[List[str]] = None,
        origin: Optional[str] = None,
        install_tree: str = "all",
    ):
    ) -> List["spack.spec.Spec"]:
        """Queries the Spack database including all upstream databases.

        Args:

@@ -1709,13 +1682,14 @@ def query(
        )

        results = list(local_results) + list(x for x in upstream_results if x not in local_results)
        return sorted(results)
        results.sort()
        return results

    def query_one(
        self,
        query_spec: Optional[Union[str, "spack.spec.Spec"]],
        predicate_fn: Optional[SelectType] = None,
        installed: Union[bool, InstallStatus, List[InstallStatus]] = True,
        installed: Union[bool, InstallRecordStatus] = True,
    ) -> Optional["spack.spec.Spec"]:
        """Query for exactly one spec that matches the query spec.

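A hedged sketch of a hash-prefix lookup with the retyped API; the hash prefix shown is made up:

    import spack.store
    from spack.enums import InstallRecordStatus

    specs = spack.store.STORE.db.get_by_hash(
        "abc1234",  # hypothetical DAG hash prefix
        installed=InstallRecordStatus.INSTALLED | InstallRecordStatus.DEPRECATED,
    )
    # returns a list of matching specs, or None if nothing matches
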
@@ -34,12 +34,12 @@ class OpenMpi(Package):
import collections.abc
import os.path
import re
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Tuple, Union
from typing import Any, Callable, List, Optional, Tuple, Union

import llnl.util.lang
import llnl.util.tty.color

import spack.deptypes as dt
import spack.package_base
import spack.patch
import spack.spec
import spack.util.crypto

@@ -56,13 +56,8 @@ class OpenMpi(Package):
    VersionLookupError,
)

if TYPE_CHECKING:
    import spack.package_base

__all__ = [
    "DirectiveError",
    "DirectiveMeta",
    "DisableRedistribute",
    "version",
    "conditional",
    "conflicts",

@@ -77,6 +72,7 @@ class OpenMpi(Package):
    "build_system",
    "requires",
    "redistribute",
    "can_splice",
]

_patch_order_index = 0

@@ -84,15 +80,15 @@ class OpenMpi(Package):

SpecType = str
DepType = Union[Tuple[str, ...], str]
WhenType = Optional[Union["spack.spec.Spec", str, bool]]
Patcher = Callable[[Union["spack.package_base.PackageBase", Dependency]], None]
WhenType = Optional[Union[spack.spec.Spec, str, bool]]
Patcher = Callable[[Union[spack.package_base.PackageBase, Dependency]], None]
PatchesType = Optional[Union[Patcher, str, List[Union[Patcher, str]]]]


SUPPORTED_LANGUAGES = ("fortran", "cxx", "c")


def _make_when_spec(value: WhenType) -> Optional["spack.spec.Spec"]:
def _make_when_spec(value: WhenType) -> Optional[spack.spec.Spec]:
    """Create a ``Spec`` that indicates when a directive should be applied.

    Directives with ``when`` specs, e.g.:

@@ -137,7 +133,7 @@ def _make_when_spec(value: WhenType) -> Optional["spack.spec.Spec"]:
    return spack.spec.Spec(value)


SubmoduleCallback = Callable[["spack.package_base.PackageBase"], Union[str, List[str], bool]]
SubmoduleCallback = Callable[[spack.package_base.PackageBase], Union[str, List[str], bool]]
directive = DirectiveMeta.directive


@@ -253,8 +249,8 @@ def _execute_version(pkg, ver, **kwargs):


def _depends_on(
    pkg: "spack.package_base.PackageBase",
    spec: "spack.spec.Spec",
    pkg: spack.package_base.PackageBase,
    spec: spack.spec.Spec,
    *,
    when: WhenType = None,
    type: DepType = dt.DEFAULT_TYPES,

@@ -333,7 +329,7 @@ def conflicts(conflict_spec: SpecType, when: WhenType = None, msg: Optional[str]
        msg (str): optional user defined message
    """

    def _execute_conflicts(pkg: "spack.package_base.PackageBase"):
    def _execute_conflicts(pkg: spack.package_base.PackageBase):
        # If when is not specified the conflict always holds
        when_spec = _make_when_spec(when)
        if not when_spec:

@@ -374,19 +370,12 @@ def depends_on(
        assert type == "build", "languages must be of 'build' type"
        return _language(lang_spec_str=spec, when=when)

    def _execute_depends_on(pkg: "spack.package_base.PackageBase"):
    def _execute_depends_on(pkg: spack.package_base.PackageBase):
        _depends_on(pkg, dep_spec, when=when, type=type, patches=patches)

    return _execute_depends_on


#: Store whether a given Spec source/binary should not be redistributed.
class DisableRedistribute:
    def __init__(self, source, binary):
        self.source = source
        self.binary = binary


@directive("disable_redistribute")
def redistribute(source=None, binary=None, when: WhenType = None):
    """Can be used inside a Package definition to declare that

@@ -403,7 +392,7 @@ def redistribute(source=None, binary=None, when: WhenType = None):


def _execute_redistribute(
    pkg: "spack.package_base.PackageBase", source=None, binary=None, when: WhenType = None
    pkg: spack.package_base.PackageBase, source=None, binary=None, when: WhenType = None
):
    if source is None and binary is None:
        return

@@ -433,7 +422,7 @@ def _execute_redistribute(
        if not binary:
            disable.binary = True
    else:
        pkg.disable_redistribute[when_spec] = DisableRedistribute(
        pkg.disable_redistribute[when_spec] = spack.package_base.DisableRedistribute(
            source=not source, binary=not binary
        )

@@ -479,7 +468,7 @@ def provides(*specs: SpecType, when: WhenType = None):
        when: condition when this provides clause needs to be considered
    """

    def _execute_provides(pkg: "spack.package_base.PackageBase"):
    def _execute_provides(pkg: spack.package_base.PackageBase):
        import spack.parser  # Avoid circular dependency

        when_spec = _make_when_spec(when)

@@ -505,6 +494,43 @@ def _execute_provides(pkg: "spack.package_base.PackageBase"):
    return _execute_provides


@directive("splice_specs")
def can_splice(
    target: SpecType, *, when: SpecType, match_variants: Union[None, str, List[str]] = None
):
    """Packages can declare whether they are ABI-compatible with another package
    and thus can be spliced into concrete versions of that package.

    Args:
        target: The spec that the current package is ABI-compatible with.

        when: An anonymous spec constraining current package for when it is
            ABI-compatible with target.

        match_variants: A list of variants that must match
            between target spec and current package, with special value '*'
            which matches all variants. Example: a variant is defined on both
            packages called json, and they are ABI-compatible whenever they agree on
            the json variant (regardless of whether it is turned on or off). Note
            that this cannot be applied to multi-valued variants and multi-valued
            variants will be skipped by '*'.
    """

    def _execute_can_splice(pkg: spack.package_base.PackageBase):
        when_spec = _make_when_spec(when)
        if isinstance(match_variants, str) and match_variants != "*":
            raise ValueError(
"* is the only valid string for match_variants "
|
||||
"if looking to provide a single variant, use "
|
||||
f"[{match_variants}] instead"
|
||||
)
|
||||
if when_spec is None:
|
||||
return
|
||||
pkg.splice_specs[when_spec] = (spack.spec.Spec(target), match_variants)
|
||||
|
||||
return _execute_can_splice
|
||||
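The new directive is consumed at class-definition time like the others here. A hedged sketch of how a package recipe might use it (package and variant names are illustrative; only the directive signature comes from this diff):

    from spack.package import *

    class Foo(Package):
        version("1.0.1")
        version("1.0.0")
        variant("json", default=False, description="enable JSON support")

        # A concrete foo@1.0.1 may be spliced in for an installed foo@1.0.0,
        # provided both specs agree on the value of the 'json' variant.
        can_splice("foo@1.0.0", when="@1.0.1", match_variants=["json"])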
@directive("patches")
def patch(
    url_or_filename: str,
@@ -531,7 +557,7 @@ def patch(
        compressed URL patches)
    """

-    def _execute_patch(pkg_or_dep: Union["spack.package_base.PackageBase", Dependency]):
+    def _execute_patch(pkg_or_dep: Union[spack.package_base.PackageBase, Dependency]):
        pkg = pkg_or_dep
        if isinstance(pkg, Dependency):
            pkg = pkg.pkg
@@ -855,7 +881,7 @@ def requires(*requirement_specs: str, policy="one_of", when=None, msg=None):
        msg: optional user defined message
    """

-    def _execute_requires(pkg: "spack.package_base.PackageBase"):
+    def _execute_requires(pkg: spack.package_base.PackageBase):
        if policy not in ("one_of", "any_of"):
            err_msg = (
                f"the 'policy' argument of the 'requires' directive in {pkg.name} is set "
@@ -880,7 +906,7 @@ def _execute_requires(pkg: "spack.package_base.PackageBase"):
def _language(lang_spec_str: str, *, when: Optional[Union[str, bool]] = None):
    """Temporary implementation of language virtuals, until compilers are proper dependencies."""

-    def _execute_languages(pkg: "spack.package_base.PackageBase"):
+    def _execute_languages(pkg: spack.package_base.PackageBase):
        when_spec = _make_when_spec(when)
        if not when_spec:
            return
lib/spack/spack/enums.py (new file, 15 lines)
@@ -0,0 +1,15 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Enumerations used throughout Spack"""
import enum


class InstallRecordStatus(enum.Flag):
    """Enum flag to facilitate querying status from the DB"""

    INSTALLED = enum.auto()
    DEPRECATED = enum.auto()
    MISSING = enum.auto()
    ANY = INSTALLED | DEPRECATED | MISSING
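Because `InstallRecordStatus` is an `enum.Flag`, members combine with bitwise operators and support containment tests, which is what replaces the old list-of-statuses API. A small standard-library illustration:

    from spack.enums import InstallRecordStatus

    wanted = InstallRecordStatus.INSTALLED | InstallRecordStatus.DEPRECATED
    assert InstallRecordStatus.DEPRECATED in wanted   # containment via Flag
    assert InstallRecordStatus.MISSING not in wanted
    assert wanted in InstallRecordStatus.ANY          # ANY is the union of all members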
@@ -325,12 +325,7 @@ def write(self, spec, color=None, out=None):
        self._out = llnl.util.tty.color.ColorStream(out, color=color)

        # We'll traverse the spec in topological order as we graph it.
-        nodes_in_topological_order = [
-            edge.spec
-            for edge in spack.traverse.traverse_edges_topo(
-                [spec], direction="children", deptype=self.depflag
-            )
-        ]
+        nodes_in_topological_order = list(spec.traverse(order="topo", deptype=self.depflag))
        nodes_in_topological_order.reverse()

        # Work on a copy to be nondestructive
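For reference, `Spec.traverse(order="topo")` yields nodes before the nodes they depend on, so reversing the list gives dependencies first. A hedged sketch of the same pattern in isolation (the spec name is illustrative):

    import spack.spec

    spec = spack.spec.Spec("zlib").concretized()
    nodes = list(spec.traverse(order="topo"))
    nodes.reverse()   # children now come before their parents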
@@ -23,7 +23,6 @@
from llnl.util.tty.color import colorize

import spack.build_environment
-import spack.builder
import spack.config
import spack.error
import spack.package_base
@@ -353,9 +352,7 @@ def status(self, name: str, status: "TestStatus", msg: Optional[str] = None):
        self.test_parts[part_name] = status
        self.counts[status] += 1

-    def phase_tests(
-        self, builder: spack.builder.Builder, phase_name: str, method_names: List[str]
-    ):
+    def phase_tests(self, builder, phase_name: str, method_names: List[str]):
        """Execute the builder's package phase-time tests.

        Args:
@@ -378,23 +375,16 @@ def phase_tests(

        for name in method_names:
            try:
-                # Prefer the method in the package over the builder's.
-                # We need this primarily to pick up arbitrarily named test
-                # methods but also some build-time checks.
-                fn = getattr(builder.pkg, name, getattr(builder, name))
-
-                msg = f"RUN-TESTS: {phase_name}-time tests [{name}]"
-                print_message(logger, msg, verbose)
-
-                fn()
-
+                fn = getattr(builder, name, None) or getattr(builder.pkg, name)
            except AttributeError as e:
-                msg = f"RUN-TESTS: method not implemented [{name}]"
-                print_message(logger, msg, verbose)
-
-                self.add_failure(e, msg)
+                print_message(logger, f"RUN-TESTS: method not implemented [{name}]", verbose)
+                self.add_failure(e, f"RUN-TESTS: method not implemented [{name}]")
                if fail_fast:
                    break
+                continue

+            print_message(logger, f"RUN-TESTS: {phase_name}-time tests [{name}]", verbose)
+            fn()

        if have_tests:
            print_message(logger, "Completed testing", verbose)
@@ -764,7 +754,7 @@ def virtuals(pkg):

    # hack for compilers that are not dependencies (yet)
    # TODO: this all eventually goes away
-    c_names = ("gcc", "intel", "intel-parallel-studio", "pgi")
+    c_names = ("gcc", "intel", "intel-parallel-studio")
    if pkg.name in c_names:
        v_names.extend(["c", "cxx", "fortran"])
    if pkg.spec.satisfies("llvm+clang"):
@@ -50,6 +50,7 @@

import spack.binary_distribution as binary_distribution
import spack.build_environment
+import spack.builder
import spack.config
import spack.database
import spack.deptypes as dt
@@ -212,7 +213,7 @@ def _check_last_phase(pkg: "spack.package_base.PackageBase") -> None:
    Raises:
        ``BadInstallPhase`` if stop_before or last phase is invalid
    """
-    phases = pkg.builder.phases  # type: ignore[attr-defined]
+    phases = spack.builder.create(pkg).phases  # type: ignore[attr-defined]
    if pkg.stop_before_phase and pkg.stop_before_phase not in phases:  # type: ignore[attr-defined]
        raise BadInstallPhase(pkg.name, pkg.stop_before_phase)  # type: ignore[attr-defined]

@@ -661,7 +662,7 @@ def log(pkg: "spack.package_base.PackageBase") -> None:
        spack.store.STORE.layout.metadata_path(pkg.spec), "archived-files"
    )

-    for glob_expr in pkg.builder.archive_files:
+    for glob_expr in spack.builder.create(pkg).archive_files:
        # Check that we are trying to copy things that are
        # in the stage tree (not arbitrary files)
        abs_expr = os.path.realpath(glob_expr)
@@ -2394,7 +2395,6 @@ def _install_source(self) -> None:
        fs.install_tree(pkg.stage.source_path, src_target)

    def _real_install(self) -> None:
-        import spack.builder

        pkg = self.pkg
@@ -29,6 +29,7 @@
import spack.config
import spack.error
import spack.fetch_strategy
import spack.mirror
import spack.oci.image
import spack.repo
import spack.spec
@@ -180,7 +181,7 @@ def ensure_mirror_usable(self, direction: str = "push"):
        if errors:
            msg = f"invalid {direction} configuration for mirror {self.name}: "
            msg += "\n ".join(errors)
-            raise spack.mirror.MirrorError(msg)
+            raise MirrorError(msg)

    def _update_connection_dict(self, current_data: dict, new_data: dict, top_level: bool):
        # Only allow one to exist in the config
@@ -10,7 +10,7 @@

import llnl.util.filesystem

-import spack.builder
+import spack.phase_callbacks


def filter_compiler_wrappers(*files, **kwargs):
@@ -111,4 +111,4 @@ def _filter_compiler_wrappers_impl(pkg_or_builder):
        if pkg.compiler.name == "nag":
            x.filter("-Wl,--enable-new-dtags", "", **filter_kwargs)

-    spack.builder.run_after(after)(_filter_compiler_wrappers_impl)
+    spack.phase_callbacks.run_after(after)(_filter_compiler_wrappers_impl)
@@ -39,7 +39,7 @@

import llnl.util.filesystem
import llnl.util.tty as tty
-from llnl.util.lang import dedupe, memoized
+from llnl.util.lang import Singleton, dedupe, memoized

import spack.build_environment
import spack.config
@@ -246,7 +246,7 @@ def _generate_upstream_module_index():
    return UpstreamModuleIndex(spack.store.STORE.db, module_indices)


-upstream_module_index = llnl.util.lang.Singleton(_generate_upstream_module_index)
+upstream_module_index = Singleton(_generate_upstream_module_index)


ModuleIndexEntry = collections.namedtuple("ModuleIndexEntry", ["path", "use_name"])
@@ -11,7 +11,7 @@
from os import chdir, environ, getcwd, makedirs, mkdir, remove, removedirs
from shutil import move, rmtree

-from spack.error import InstallError
+from spack.error import InstallError, NoHeadersError, NoLibrariesError

# Emulate some shell commands for convenience
env = environ
@@ -74,7 +74,7 @@
from spack.build_systems.sourceware import SourcewarePackage
from spack.build_systems.waf import WafPackage
from spack.build_systems.xorg import XorgPackage
-from spack.builder import run_after, run_before
+from spack.builder import BaseBuilder
from spack.config import determine_number_of_jobs
from spack.deptypes import ALL_TYPES as all_deptypes
from spack.directives import *
@@ -100,6 +100,7 @@
    on_package_attributes,
)
from spack.package_completions import *
+from spack.phase_callbacks import run_after, run_before
from spack.spec import InvalidSpecDetected, Spec
from spack.util.executable import *
from spack.util.filesystem import file_command, fix_darwin_install_name, mime_type
@@ -32,18 +32,18 @@
from llnl.util.lang import classproperty, memoized
from llnl.util.link_tree import LinkTree

-import spack.builder
import spack.compilers
import spack.config
import spack.dependency
import spack.deptypes as dt
-import spack.directives
+import spack.directives_meta
import spack.error
import spack.fetch_strategy as fs
import spack.hooks
import spack.mirror
import spack.multimethod
import spack.patch
+import spack.phase_callbacks
import spack.repo
import spack.spec
import spack.store
@@ -51,9 +51,9 @@
import spack.util.environment
import spack.util.path
import spack.util.web
import spack.variant
from spack.error import InstallError, NoURLError, PackageError
from spack.filesystem_view import YamlFilesystemView
-from spack.install_test import PackageTest, TestSuite
from spack.solver.version_order import concretization_version_order
from spack.stage import DevelopStage, ResourceStage, Stage, StageComposite, compute_stage_name
from spack.util.package_hash import package_hash
@@ -299,9 +299,9 @@ def determine_variants(cls, objs, version_str):


class PackageMeta(
-    spack.builder.PhaseCallbacksMeta,
+    spack.phase_callbacks.PhaseCallbacksMeta,
    DetectablePackageMeta,
-    spack.directives.DirectiveMeta,
+    spack.directives_meta.DirectiveMeta,
    spack.multimethod.MultiMethodMeta,
):
    """
@@ -453,7 +453,7 @@ def _names(when_indexed_dictionary: WhenDict) -> List[str]:
    return sorted(all_names)


-WhenVariantList = List[Tuple["spack.spec.Spec", "spack.variant.Variant"]]
+WhenVariantList = List[Tuple[spack.spec.Spec, spack.variant.Variant]]


def _remove_overridden_vdefs(variant_defs: WhenVariantList) -> None:
@@ -492,41 +492,14 @@ class Hipblas:
        i += 1


-class RedistributionMixin:
-    """Logic for determining whether a Package is source/binary
-    redistributable.
-    """
-
-    #: Store whether a given Spec source/binary should not be
-    #: redistributed.
-    disable_redistribute: Dict["spack.spec.Spec", "spack.directives.DisableRedistribute"]
-
-    # Source redistribution must be determined before concretization
-    # (because source mirrors work with un-concretized Specs).
-    @classmethod
-    def redistribute_source(cls, spec):
-        """Whether it should be possible to add the source of this
-        package to a Spack mirror.
-        """
-        for when_spec, disable_redistribute in cls.disable_redistribute.items():
-            if disable_redistribute.source and spec.satisfies(when_spec):
-                return False
-
-        return True
-
-    @property
-    def redistribute_binary(self):
-        """Whether it should be possible to create a binary out of an
-        installed instance of this package.
-        """
-        for when_spec, disable_redistribute in self.__class__.disable_redistribute.items():
-            if disable_redistribute.binary and self.spec.satisfies(when_spec):
-                return False
-
-        return True
+#: Store whether a given Spec source/binary should not be redistributed.
+class DisableRedistribute:
+    def __init__(self, source, binary):
+        self.source = source
+        self.binary = binary


-class PackageBase(WindowsRPath, PackageViewMixin, RedistributionMixin, metaclass=PackageMeta):
+class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
    """This is the superclass for all spack packages.

    ***The Package class***
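The `DisableRedistribute` records tracked here are populated by the `redistribute` directive shown earlier in this diff. A hedged sketch of the package-facing side (the package name and constraint are illustrative):

    from spack.package import *

    class Foo(Package):
        # Allow source mirroring, but forbid binary redistribution of
        # builds made with gcc.
        redistribute(binary=False, when="%gcc")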
@@ -612,16 +585,20 @@ class PackageBase(WindowsRPath, PackageViewMixin, RedistributionMixin, metaclass
    # Declare versions dictionary as placeholder for values.
    # This allows analysis tools to correctly interpret the class attributes.
    versions: dict
-    dependencies: Dict["spack.spec.Spec", Dict[str, "spack.dependency.Dependency"]]
-    conflicts: Dict["spack.spec.Spec", List[Tuple["spack.spec.Spec", Optional[str]]]]
+    dependencies: Dict[spack.spec.Spec, Dict[str, spack.dependency.Dependency]]
+    conflicts: Dict[spack.spec.Spec, List[Tuple[spack.spec.Spec, Optional[str]]]]
    requirements: Dict[
-        "spack.spec.Spec", List[Tuple[Tuple["spack.spec.Spec", ...], str, Optional[str]]]
+        spack.spec.Spec, List[Tuple[Tuple[spack.spec.Spec, ...], str, Optional[str]]]
    ]
-    provided: Dict["spack.spec.Spec", Set["spack.spec.Spec"]]
-    provided_together: Dict["spack.spec.Spec", List[Set[str]]]
-    patches: Dict["spack.spec.Spec", List["spack.patch.Patch"]]
-    variants: Dict["spack.spec.Spec", Dict[str, "spack.variant.Variant"]]
-    languages: Dict["spack.spec.Spec", Set[str]]
+    provided: Dict[spack.spec.Spec, Set[spack.spec.Spec]]
+    provided_together: Dict[spack.spec.Spec, List[Set[str]]]
+    patches: Dict[spack.spec.Spec, List[spack.patch.Patch]]
+    variants: Dict[spack.spec.Spec, Dict[str, spack.variant.Variant]]
+    languages: Dict[spack.spec.Spec, Set[str]]
+    splice_specs: Dict[spack.spec.Spec, Tuple[spack.spec.Spec, Union[None, str, List[str]]]]

+    #: Store whether a given Spec source/binary should not be redistributed.
+    disable_redistribute: Dict[spack.spec.Spec, DisableRedistribute]

    #: By default, packages are not virtual
    #: Virtual packages override this attribute
@@ -736,11 +713,11 @@ class PackageBase(WindowsRPath, PackageViewMixin, RedistributionMixin, metaclass
    test_requires_compiler: bool = False

    #: TestSuite instance used to manage stand-alone tests for 1+ specs.
-    test_suite: Optional["TestSuite"] = None
+    test_suite: Optional[Any] = None

    def __init__(self, spec):
        # this determines how the package should be built.
-        self.spec: "spack.spec.Spec" = spec
+        self.spec: spack.spec.Spec = spec

        # Allow custom staging paths for packages
        self.path = None
@@ -758,7 +735,7 @@ def __init__(self, spec):
        # init internal variables
        self._stage: Optional[StageComposite] = None
        self._fetcher = None
-        self._tester: Optional["PackageTest"] = None
+        self._tester: Optional[Any] = None

        # Set up timing variables
        self._fetch_time = 0.0
@@ -808,9 +785,7 @@ def variant_definitions(cls, name: str) -> WhenVariantList:
        return defs

    @classmethod
-    def variant_items(
-        cls,
-    ) -> Iterable[Tuple["spack.spec.Spec", Dict[str, "spack.variant.Variant"]]]:
+    def variant_items(cls) -> Iterable[Tuple[spack.spec.Spec, Dict[str, spack.variant.Variant]]]:
        """Iterate over ``cls.variants.items()`` with overridden definitions removed."""
        # Note: This is quadratic in the average number of variant definitions per name.
        # That is likely close to linear in practice, as there are few variants with
@@ -828,7 +803,7 @@ def variant_items(
        if filtered_variants_by_name:
            yield when, filtered_variants_by_name

-    def get_variant(self, name: str) -> "spack.variant.Variant":
+    def get_variant(self, name: str) -> spack.variant.Variant:
        """Get the highest precedence variant definition matching this package's spec.

        Arguments:
@@ -1003,6 +978,26 @@ def global_license_file(self):
            self.global_license_dir, self.name, os.path.basename(self.license_files[0])
        )

+    # Source redistribution must be determined before concretization (because source mirrors work
+    # with abstract specs).
+    @classmethod
+    def redistribute_source(cls, spec):
+        """Whether it should be possible to add the source of this
+        package to a Spack mirror."""
+        for when_spec, disable_redistribute in cls.disable_redistribute.items():
+            if disable_redistribute.source and spec.satisfies(when_spec):
+                return False
+        return True
+
+    @property
+    def redistribute_binary(self):
+        """Whether it should be possible to create a binary out of an installed instance of this
+        package."""
+        for when_spec, disable_redistribute in self.disable_redistribute.items():
+            if disable_redistribute.binary and self.spec.satisfies(when_spec):
+                return False
+        return True
+
    # NOTE: return type should be Optional[Literal['all', 'specific', 'none']] in
    # Python 3.8+, but we still support 3.6.
    @property
@@ -1015,7 +1010,7 @@ def keep_werror(self) -> Optional[str]:
        * ``"none"``: filter out all ``-Werror*`` flags.
        * ``None``: respect the user's configuration (``"none"`` by default).
        """
-        if self.spec.satisfies("%nvhpc@:23.3") or self.spec.satisfies("%pgi"):
+        if self.spec.satisfies("%nvhpc@:23.3"):
            # Filtering works by replacing -Werror with -Wno-error, but older nvhpc and
            # PGI do not understand -Wno-error, so we disable filtering.
            return "all"
@@ -1352,11 +1347,13 @@ def archive_install_test_log(self):

    @property
    def tester(self):
+        import spack.install_test

        if not self.spec.versions.concrete:
            raise ValueError("Cannot retrieve tester for package without concrete version.")

        if not self._tester:
-            self._tester = PackageTest(self)
+            self._tester = spack.install_test.PackageTest(self)
        return self._tester

    @property
@@ -2013,72 +2010,58 @@ def build_system_flags(
        """
        return None, None, flags

-    def setup_run_environment(self, env):
+    def setup_run_environment(self, env: spack.util.environment.EnvironmentModifications) -> None:
        """Sets up the run environment for a package.

        Args:
-            env (spack.util.environment.EnvironmentModifications): environment
-                modifications to be applied when the package is run. Package authors
+            env: environment modifications to be applied when the package is run. Package authors
                can call methods on it to alter the run environment.
        """
        pass

-    def setup_dependent_run_environment(self, env, dependent_spec):
+    def setup_dependent_run_environment(
+        self, env: spack.util.environment.EnvironmentModifications, dependent_spec: spack.spec.Spec
+    ) -> None:
        """Sets up the run environment of packages that depend on this one.

-        This is similar to ``setup_run_environment``, but it is used to
-        modify the run environments of packages that *depend* on this one.
+        This is similar to ``setup_run_environment``, but it is used to modify the run environment
+        of a package that *depends* on this one.

-        This gives packages like Python and others that follow the extension
-        model a way to implement common environment or run-time settings
-        for dependencies.
+        This gives packages like Python and others that follow the extension model a way to
+        implement common environment or run-time settings for dependencies.

        Args:
-            env (spack.util.environment.EnvironmentModifications): environment
-                modifications to be applied when the dependent package is run.
-                Package authors can call methods on it to alter the build environment.
+            env: environment modifications to be applied when the dependent package is run. Package
+                authors can call methods on it to alter the build environment.

-            dependent_spec (spack.spec.Spec): The spec of the dependent package
-                about to be run. This allows the extendee (self) to query
-                the dependent's state. Note that *this* package's spec is
+            dependent_spec: The spec of the dependent package about to be run. This allows the
+                extendee (self) to query the dependent's state. Note that *this* package's spec is
                available as ``self.spec``
        """
        pass

-    def setup_dependent_package(self, module, dependent_spec):
-        """Set up Python module-scope variables for dependent packages.
+    def setup_dependent_package(self, module, dependent_spec: spack.spec.Spec) -> None:
+        """Set up module-scope global variables for dependent packages.

        Called before the install() method of dependents.

-        Default implementation does nothing, but this can be
-        overridden by an extendable package to set up the module of
-        its extensions. This is useful if there are some common steps
-        to installing all extensions for a certain package.
+        This function is called when setting up the build and run environments of a DAG.

        Examples:

-        1. Extensions often need to invoke the ``python`` interpreter
-           from the Python installation being extended. This routine
-           can put a ``python()`` Executable object in the module scope
-           for the extension package to simplify extension installs.
+        1. Extensions often need to invoke the ``python`` interpreter from the Python installation
+           being extended. This routine can put a ``python`` Executable as a global in the module
+           scope for the extension package to simplify extension installs.

-        2. MPI compilers could set some variables in the dependent's
-           scope that point to ``mpicc``, ``mpicxx``, etc., allowing
-           them to be called by common name regardless of which MPI is used.

-        3. BLAS/LAPACK implementations can set some variables
-           indicating the path to their libraries, since these
-           paths differ by BLAS/LAPACK implementation.
+        2. MPI compilers could set some variables in the dependent's scope that point to ``mpicc``,
+           ``mpicxx``, etc., allowing them to be called by common name regardless of which MPI is
+           used.

        Args:
-            module (spack.package_base.PackageBase.module): The Python ``module``
-                object of the dependent package. Packages can use this to set
-                module-scope variables for the dependent to use.
+            module: The Python ``module`` object of the dependent package. Packages can use this to
+                set module-scope variables for the dependent to use.

-            dependent_spec (spack.spec.Spec): The spec of the dependent package
-                about to be built. This allows the extendee (self) to
-                query the dependent's state. Note that *this*
-                package's spec is available as ``self.spec``.
+            dependent_spec: The spec of the dependent package about to be built. This allows the
+                extendee (self) to query the dependent's state. Note that *this* package's spec is
                available as ``self.spec``.
        """
        pass
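With the new annotations, an override in a package recipe reads like this hedged sketch (the package name is illustrative; `set` and `prepend_path` are methods of `EnvironmentModifications`):

    from spack.package import *

    class Foo(Package):
        def setup_run_environment(self, env):
            # 'env' collects modifications; Spack applies them when the
            # installed package is loaded into a user environment.
            env.set("FOO_ROOT", self.prefix)
            env.prepend_path("PATH", self.prefix.bin)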
@@ -2105,7 +2088,7 @@ def flag_handler(self, var: FLAG_HANDLER_TYPE) -> None:
    # arguments. This is implemented for build system classes where
    # appropriate and will otherwise raise a NotImplementedError.

-    def flags_to_build_system_args(self, flags):
+    def flags_to_build_system_args(self, flags: Dict[str, List[str]]) -> None:
        # Takes flags as a dict name: list of values
        if any(v for v in flags.values()):
            msg = "The {0} build system".format(self.__class__.__name__)
@@ -2308,10 +2291,6 @@ def rpath_args(self):
        """
        return " ".join("-Wl,-rpath,%s" % p for p in self.rpath)

-    @property
-    def builder(self):
-        return spack.builder.create(self)


inject_flags = PackageBase.inject_flags
env_flags = PackageBase.env_flags
lib/spack/spack/phase_callbacks.py (new file, 105 lines)
@@ -0,0 +1,105 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import collections

import llnl.util.lang as lang

#: An object of this kind is a shared global state used to collect callbacks during
#: class definition time, and is flushed when the class object is created at the end
#: of the class definition
#:
#: Args:
#:    attribute_name (str): name of the attribute that will be attached to the builder
#:    callbacks (list): container used to temporarily aggregate the callbacks
CallbackTemporaryStage = collections.namedtuple(
    "CallbackTemporaryStage", ["attribute_name", "callbacks"]
)

#: Shared global state to aggregate "@run_before" callbacks
_RUN_BEFORE = CallbackTemporaryStage(attribute_name="run_before_callbacks", callbacks=[])
#: Shared global state to aggregate "@run_after" callbacks
_RUN_AFTER = CallbackTemporaryStage(attribute_name="run_after_callbacks", callbacks=[])


class PhaseCallbacksMeta(type):
    """Permit to register arbitrary functions during class definition and run them
    later, before or after a given install phase.

    Each method decorated with ``run_before`` or ``run_after`` gets temporarily
    stored in a global shared state when a class being defined is parsed by the Python
    interpreter. At class definition time that temporary storage gets flushed and a list
    of callbacks is attached to the class being defined.
    """

    def __new__(mcs, name, bases, attr_dict):
        for temporary_stage in (_RUN_BEFORE, _RUN_AFTER):
            staged_callbacks = temporary_stage.callbacks

            # Here we have an adapter from an old-style package. This means there is no
            # hierarchy of builders, and every callback that had to be combined between
            # *Package and *Builder has been combined already by _PackageAdapterMeta
            if name == "Adapter":
                continue

            # If we are here we have callbacks. To get a complete list, we accumulate all the
            # callbacks from base classes, we deduplicate them, then prepend what we have
            # registered here.
            #
            # The order should be:
            # 1. Callbacks are registered in order within the same class
            # 2. Callbacks defined in derived classes precede those defined in base
            #    classes
            callbacks_from_base = []
            for base in bases:
                current_callbacks = getattr(base, temporary_stage.attribute_name, None)
                if not current_callbacks:
                    continue
                callbacks_from_base.extend(current_callbacks)
            callbacks_from_base = list(lang.dedupe(callbacks_from_base))
            # Set the callbacks in this class and flush the temporary stage
            attr_dict[temporary_stage.attribute_name] = staged_callbacks[:] + callbacks_from_base
            del temporary_stage.callbacks[:]

        return super(PhaseCallbacksMeta, mcs).__new__(mcs, name, bases, attr_dict)

    @staticmethod
    def run_after(phase, when=None):
        """Decorator to register a function for running after a given phase.

        Args:
            phase (str): phase after which the function must run.
            when (str): condition under which the function is run (if None, it is always run).
        """

        def _decorator(fn):
            key = (phase, when)
            item = (key, fn)
            _RUN_AFTER.callbacks.append(item)
            return fn

        return _decorator

    @staticmethod
    def run_before(phase, when=None):
        """Decorator to register a function for running before a given phase.

        Args:
            phase (str): phase before which the function must run.
            when (str): condition under which the function is run (if None, it is always run).
        """

        def _decorator(fn):
            key = (phase, when)
            item = (key, fn)
            _RUN_BEFORE.callbacks.append(item)
            return fn

        return _decorator


# Export these names as standalone to be used in packages
run_after = PhaseCallbacksMeta.run_after
run_before = PhaseCallbacksMeta.run_before
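Any class built with this metaclass picks up callbacks registered while its body executes. A minimal sketch, independent of Spack's builder machinery (the class name is illustrative):

    import spack.phase_callbacks

    class MyBuilder(metaclass=spack.phase_callbacks.PhaseCallbacksMeta):
        @spack.phase_callbacks.run_after("install")
        def check(self):
            # Staged in _RUN_AFTER while the class body runs, then attached
            # to MyBuilder as 'run_after_callbacks' by the metaclass.
            print("post-install check")

    assert MyBuilder.run_after_callbacks[0][0] == ("install", None)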
@@ -13,6 +13,7 @@
import macholib.mach_o
import macholib.MachO

+import llnl.util.filesystem as fs
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.lang import memoized
@@ -275,10 +276,10 @@ def modify_macho_object(cur_path, rpaths, deps, idpath, paths_to_paths):

    # Deduplicate and flatten
    args = list(itertools.chain.from_iterable(llnl.util.lang.dedupe(args)))
+    install_name_tool = executable.Executable("install_name_tool")
    if args:
-        args.append(str(cur_path))
-        install_name_tool = executable.Executable("install_name_tool")
-        install_name_tool(*args)
+        with fs.edit_in_place_through_temporary_file(cur_path) as temp_path:
+            install_name_tool(*args, temp_path)


def macholib_get_paths(cur_path):
@@ -717,8 +718,8 @@ def fixup_macos_rpath(root, filename):
        # No fixes needed
        return False

-    args.append(abspath)
-    executable.Executable("install_name_tool")(*args)
+    with fs.edit_in_place_through_temporary_file(abspath) as temp_path:
+        executable.Executable("install_name_tool")(*args, temp_path)
    return True
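Both call sites now route `install_name_tool` through a temporary file instead of rewriting the binary at its final path. A hedged sketch of the call shape (the helper's exact semantics live in `llnl.util.filesystem`; the path and tool arguments are illustrative):

    import llnl.util.filesystem as fs
    from spack.util import executable

    install_name_tool = executable.Executable("install_name_tool")
    with fs.edit_in_place_through_temporary_file("/path/to/libfoo.dylib") as temp_path:
        # Edits land on a temporary copy; the original path is replaced with
        # the edited file when the context exits.
        install_name_tool("-add_rpath", "/new/rpath", temp_path)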
@@ -78,7 +78,8 @@
                        "transitive": {"type": "boolean", "default": False},
                    },
                },
-            }
+            },
+            "automatic": {"type": "boolean"},
        },
    },
    "duplicates": {
@@ -87,6 +88,8 @@
            "strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
        },
    },
+    "timeout": {"type": "integer", "minimum": 0},
+    "error_on_timeout": {"type": "boolean"},
    "os_compatible": {"type": "object", "additionalProperties": {"type": "array"}},
},
}
@@ -27,6 +27,7 @@

import spack
import spack.binary_distribution
import spack.compiler
import spack.compilers
import spack.concretize
import spack.config
@@ -52,6 +53,7 @@

from .core import (
    AspFunction,
+    AspVar,
    NodeArgument,
    ast_sym,
    ast_type,
@@ -524,12 +526,14 @@ def _compute_specs_from_answer_set(self):
                node = SpecBuilder.make_node(pkg=providers[0])
                candidate = answer.get(node)

-                if candidate and candidate.build_spec.satisfies(input_spec):
-                    if not candidate.satisfies(input_spec):
-                        tty.warn(
-                            "explicit splice configuration has caused the concretized spec"
-                            f" {candidate} not to satisfy the input spec {input_spec}"
-                        )
+                if candidate and candidate.satisfies(input_spec):
                    self._concrete_specs.append(answer[node])
                    self._concrete_specs_by_input[input_spec] = answer[node]
+                elif candidate and candidate.build_spec.satisfies(input_spec):
+                    tty.warn(
+                        "explicit splice configuration has caused the concretized spec"
+                        f" {candidate} not to satisfy the input spec {input_spec}"
+                    )
+                    self._concrete_specs.append(answer[node])
+                    self._concrete_specs_by_input[input_spec] = answer[node]
                else:
@@ -854,6 +858,8 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
            self.control.load(os.path.join(parent_dir, "libc_compatibility.lp"))
        else:
            self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
+        if setup.enable_splicing:
+            self.control.load(os.path.join(parent_dir, "splices.lp"))

        timer.stop("load")

@@ -880,7 +886,22 @@ def on_model(model):
            solve_kwargs["on_unsat"] = cores.append

        timer.start("solve")
-        solve_result = self.control.solve(**solve_kwargs)
+        time_limit = spack.config.CONFIG.get("concretizer:timeout", -1)
+        error_on_timeout = spack.config.CONFIG.get("concretizer:error_on_timeout", True)
+        # Spack uses 0 to set no time limit, clingo API uses -1
+        if time_limit == 0:
+            time_limit = -1
+        with self.control.solve(**solve_kwargs, async_=True) as handle:
+            finished = handle.wait(time_limit)
+            if not finished:
+                specs_str = ", ".join(llnl.util.lang.elide_list([str(s) for s in specs], 4))
+                header = f"Spack is taking more than {time_limit} seconds to solve for {specs_str}"
+                if error_on_timeout:
+                    raise UnsatisfiableSpecError(f"{header}, stopping concretization")
+                warnings.warn(f"{header}, using the best configuration found so far")
+                handle.cancel()
+
+            solve_result = handle.get()
        timer.stop("solve")

        # once done, construct the solve result
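Underneath Spack's plumbing this is the standard clingo asynchronous-solve pattern. A self-contained sketch (requires the clingo Python bindings; the program is a toy):

    from clingo import Control

    ctl = Control()
    ctl.add("base", [], "{ a; b }. :- a, b.")
    ctl.ground([("base", [])])

    with ctl.solve(async_=True) as handle:
        finished = handle.wait(2.0)   # seconds; a negative value waits forever
        if not finished:
            handle.cancel()           # keep the best result found so far
        result = handle.get()
    print(result.satisfiable)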
@@ -1166,6 +1187,9 @@ def __init__(self, tests: bool = False):
        # list of unique libc specs targeted by compilers (or an educated guess if no compiler)
        self.libcs: List[spack.spec.Spec] = []

+        # If true, we have to load the code for synthesizing splices
+        self.enable_splicing: bool = spack.config.CONFIG.get("concretizer:splice:automatic")

    def pkg_version_rules(self, pkg):
        """Output declared versions of a package.

@@ -1336,6 +1360,10 @@ def pkg_rules(self, pkg, tests):
        # dependencies
        self.package_dependencies_rules(pkg)

+        # splices
+        if self.enable_splicing:
+            self.package_splice_rules(pkg)

        # virtual preferences
        self.virtual_preferences(
            pkg.name,
@@ -1674,6 +1702,94 @@ def dependency_holds(input_spec, requirements):

        self.gen.newline()

+    def _gen_match_variant_splice_constraints(
+        self,
+        pkg,
+        cond_spec: "spack.spec.Spec",
+        splice_spec: "spack.spec.Spec",
+        hash_asp_var: "AspVar",
+        splice_node,
+        match_variants: List[str],
+    ):
+        # If there are no variants to match, no constraints are needed
+        variant_constraints = []
+        for i, variant_name in enumerate(match_variants):
+            vari_defs = pkg.variant_definitions(variant_name)
+            # the spliceable config of the package always includes the variant
+            if vari_defs != [] and any(cond_spec.satisfies(s) for (s, _) in vari_defs):
+                variant = vari_defs[0][1]
+                if variant.multi:
+                    continue  # cannot automatically match multi-valued variants
+                value_var = AspVar(f"VariValue{i}")
+                attr_constraint = fn.attr("variant_value", splice_node, variant_name, value_var)
+                hash_attr_constraint = fn.hash_attr(
+                    hash_asp_var, "variant_value", splice_spec.name, variant_name, value_var
+                )
+                variant_constraints.append(attr_constraint)
+                variant_constraints.append(hash_attr_constraint)
+        return variant_constraints
+
+    def package_splice_rules(self, pkg):
+        self.gen.h2("Splice rules")
+        for i, (cond, (spec_to_splice, match_variants)) in enumerate(
+            sorted(pkg.splice_specs.items())
+        ):
+            with named_spec(cond, pkg.name):
+                self.version_constraints.add((cond.name, cond.versions))
+                self.version_constraints.add((spec_to_splice.name, spec_to_splice.versions))
+                hash_var = AspVar("Hash")
+                splice_node = fn.node(AspVar("NID"), cond.name)
+                when_spec_attrs = [
+                    fn.attr(c.args[0], splice_node, *(c.args[2:]))
+                    for c in self.spec_clauses(cond, body=True, required_from=None)
+                    if c.args[0] != "node"
+                ]
+                splice_spec_hash_attrs = [
+                    fn.hash_attr(hash_var, *(c.args))
+                    for c in self.spec_clauses(spec_to_splice, body=True, required_from=None)
+                    if c.args[0] != "node"
+                ]
+                if match_variants is None:
+                    variant_constraints = []
+                elif match_variants == "*":
+                    filt_match_variants = set()
+                    for map in pkg.variants.values():
+                        for k in map:
+                            filt_match_variants.add(k)
+                    filt_match_variants = list(filt_match_variants)
+                    variant_constraints = self._gen_match_variant_splice_constraints(
+                        pkg, cond, spec_to_splice, hash_var, splice_node, filt_match_variants
+                    )
+                else:
+                    if any(
+                        v in cond.variants or v in spec_to_splice.variants for v in match_variants
+                    ):
+                        raise Exception(
+                            "Overlap between match_variants and explicitly set variants"
+                        )
+                    variant_constraints = self._gen_match_variant_splice_constraints(
+                        pkg, cond, spec_to_splice, hash_var, splice_node, match_variants
+                    )

+                rule_head = fn.abi_splice_conditions_hold(
+                    i, splice_node, spec_to_splice.name, hash_var
+                )
+                rule_body_components = (
+                    [
+                        # splice_set_fact,
+                        fn.attr("node", splice_node),
+                        fn.installed_hash(spec_to_splice.name, hash_var),
+                    ]
+                    + when_spec_attrs
+                    + splice_spec_hash_attrs
+                    + variant_constraints
+                )
+                rule_body = ",\n  ".join(str(r) for r in rule_body_components)
+                rule = f"{rule_head} :-\n  {rule_body}."
+                self.gen.append(rule)
+
+        self.gen.newline()

    def virtual_preferences(self, pkg_name, func):
        """Call func(vspec, provider, i) for each of pkg's provider prefs."""
        config = spack.config.get("packages")
@@ -2536,8 +2652,9 @@ def concrete_specs(self):
        for h, spec in self.reusable_and_possible.explicit_items():
            # this indicates that there is a spec like this installed
            self.gen.fact(fn.installed_hash(spec.name, h))
-            # this describes what constraints it imposes on the solve
-            self.impose(h, spec, body=True)
+            # indirection layer between hash constraints and imposition to allow for splicing
+            for pred in self.spec_clauses(spec, body=True, required_from=None):
+                self.gen.fact(fn.hash_attr(h, *pred.args))
            self.gen.newline()
            # Declare as possible parts of specs that are not in package.py
            #   - Add versions to possible versions
@@ -3478,6 +3595,14 @@ def consume_facts(self):
        self._setup.effect_rules()


+# This should be a dataclass, but dataclasses don't work on Python 3.6
+class Splice:
+    def __init__(self, splice_node: NodeArgument, child_name: str, child_hash: str):
+        self.splice_node = splice_node
+        self.child_name = child_name
+        self.child_hash = child_hash
+
+
class SpecBuilder:
    """Class with actions to rebuild a spec from ASP results."""

@@ -3513,10 +3638,11 @@ def make_node(*, pkg: str) -> NodeArgument:
        """
        return NodeArgument(id="0", pkg=pkg)

-    def __init__(
-        self, specs: List[spack.spec.Spec], *, hash_lookup: Optional[ConcreteSpecsByHash] = None
-    ):
+    def __init__(self, specs, hash_lookup=None):
        self._specs: Dict[NodeArgument, spack.spec.Spec] = {}

+        # Matches parent nodes to splice node
+        self._splices: Dict[NodeArgument, List[Splice]] = {}
        self._result = None
        self._command_line_specs = specs
        self._flag_sources: Dict[Tuple[NodeArgument, str], Set[str]] = collections.defaultdict(
@@ -3600,16 +3726,8 @@ def external_spec_selected(self, node, idx):

    def depends_on(self, parent_node, dependency_node, type):
        dependency_spec = self._specs[dependency_node]
-        edges = self._specs[parent_node].edges_to_dependencies(name=dependency_spec.name)
-        edges = [x for x in edges if id(x.spec) == id(dependency_spec)]
        depflag = dt.flag_from_string(type)
-
-        if not edges:
-            self._specs[parent_node].add_dependency_edge(
-                self._specs[dependency_node], depflag=depflag, virtuals=()
-            )
-        else:
-            edges[0].update_deptypes(depflag=depflag)
+        self._specs[parent_node].add_dependency_edge(dependency_spec, depflag=depflag, virtuals=())

    def virtual_on_edge(self, parent_node, provider_node, virtual):
        dependencies = self._specs[parent_node].edges_to_dependencies(name=(provider_node.pkg))
@@ -3726,6 +3844,57 @@ def _order_index(flag_group):
    def deprecated(self, node: NodeArgument, version: str) -> None:
        tty.warn(f'using "{node.pkg}@{version}" which is a deprecated version')

+    def splice_at_hash(
+        self,
+        parent_node: NodeArgument,
+        splice_node: NodeArgument,
+        child_name: str,
+        child_hash: str,
+    ):
+        splice = Splice(splice_node, child_name=child_name, child_hash=child_hash)
+        self._splices.setdefault(parent_node, []).append(splice)
+
+    def _resolve_automatic_splices(self):
+        """After all of the specs have been concretized, apply all immediate splices.
+
+        Use reverse topological order to ensure that all dependencies are resolved
+        before their parents, allowing for maximal sharing and minimal copying.
+
+        """
+        fixed_specs = {}
+
+        # create a mapping from dag hash to an integer representing position in reverse topo order.
+        specs = self._specs.values()
+        topo_order = list(traverse.traverse_nodes(specs, order="topo", key=traverse.by_dag_hash))
+        topo_lookup = {spec.dag_hash(): index for index, spec in enumerate(reversed(topo_order))}
+
+        # iterate over specs, children before parents
+        for node, spec in sorted(self._specs.items(), key=lambda x: topo_lookup[x[1].dag_hash()]):
+            immediate = self._splices.get(node, [])
+            if not immediate and not any(
+                edge.spec in fixed_specs for edge in spec.edges_to_dependencies()
+            ):
+                continue
+            new_spec = spec.copy(deps=False)
+            new_spec.build_spec = spec
+            for edge in spec.edges_to_dependencies():
+                depflag = edge.depflag & ~dt.BUILD
+                if any(edge.spec.dag_hash() == splice.child_hash for splice in immediate):
+                    splice = [s for s in immediate if s.child_hash == edge.spec.dag_hash()][0]
+                    new_spec.add_dependency_edge(
+                        self._specs[splice.splice_node], depflag=depflag, virtuals=edge.virtuals
+                    )
+                elif edge.spec in fixed_specs:
+                    new_spec.add_dependency_edge(
+                        fixed_specs[edge.spec], depflag=depflag, virtuals=edge.virtuals
+                    )
+                else:
+                    new_spec.add_dependency_edge(
+                        edge.spec, depflag=depflag, virtuals=edge.virtuals
+                    )
+            self._specs[node] = new_spec
+            fixed_specs[spec] = new_spec
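Sorting by position in a reversed topological order is what guarantees children are rewritten before their parents, so each parent only has to re-point edges at already-fixed dependencies. The ordering trick in isolation (illustrative, not Spack API):

    # parents-first topological order of a tiny DAG: root -> mid -> leaf
    topo_order = ["root", "mid", "leaf"]
    topo_lookup = {name: i for i, name in enumerate(reversed(topo_order))}

    nodes = ["mid", "root", "leaf"]
    children_first = sorted(nodes, key=topo_lookup.__getitem__)
    assert children_first == ["leaf", "mid", "root"]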
    @staticmethod
    def sort_fn(function_tuple) -> Tuple[int, int]:
        """Ensure attributes are evaluated in the correct order.
@@ -3755,7 +3924,6 @@ def build_specs(self, function_tuples):
        # them here so that directives that build objects (like node and
        # node_compiler) are called in the right order.
        self.function_tuples = sorted(set(function_tuples), key=self.sort_fn)

        self._specs = {}
        for name, args in self.function_tuples:
            if SpecBuilder.ignored_attributes.match(name):
@@ -3785,10 +3953,14 @@ def build_specs(self, function_tuples):
                continue

            # if we've already gotten a concrete spec for this pkg,
-            # do not bother calling actions on it
+            # do not bother calling actions on it except for node_flag_source,
+            # since node_flag_source is tracking information not in the spec itself
+            # we also need to keep track of splicing information.
            spec = self._specs.get(args[0])
            if spec and spec.concrete:
-                continue
+                do_not_ignore_attrs = ["node_flag_source", "splice_at_hash"]
+                if name not in do_not_ignore_attrs:
+                    continue

            action(*args)

@@ -3798,7 +3970,7 @@ def build_specs(self, function_tuples):
        # inject patches -- note that we' can't use set() to unique the
        # roots here, because the specs aren't complete, and the hash
        # function will loop forever.
-        roots = [spec.root for spec in self._specs.values() if not spec.root.installed]
+        roots = [spec.root for spec in self._specs.values()]
        roots = dict((id(r), r) for r in roots)
        for root in roots.values():
            spack.spec.Spec.inject_patches_variant(root)
@@ -3814,6 +3986,8 @@ def build_specs(self, function_tuples):
        for root in roots.values():
            root._finalize_concretization()

+        self._resolve_automatic_splices()

        for s in self._specs.values():
            spack.spec.Spec.ensure_no_deprecated(s)

@@ -3828,7 +4002,6 @@ def build_specs(self, function_tuples):
        )

        specs = self.execute_explicit_splices()

        return specs

    def execute_explicit_splices(self):
@@ -4165,7 +4338,6 @@ def reusable_specs(self, specs: List[spack.spec.Spec]) -> List[spack.spec.Spec]:
        result = []
        for reuse_source in self.reuse_sources:
            result.extend(reuse_source.selected_specs())

        # If we only want to reuse dependencies, remove the root specs
        if self.reuse_strategy == ReuseStrategy.DEPENDENCIES:
            result = [spec for spec in result if not any(root in spec for root in specs)]
@@ -4335,11 +4507,10 @@ def __init__(self, provided, conflicts):

        super().__init__(msg)

-        self.provided = provided

        # Add attribute expected of the superclass interface
        self.required = None
        self.constraint_type = None
+        self.provided = provided


class InvalidSpliceError(spack.error.SpackError):
@@ -1449,25 +1449,71 @@ attr("node_flag", PackageNode, NodeFlag) :- attr("node_flag_set", PackageNode, N


%-----------------------------------------------------------------------------
-% Installed packages
+% Installed Packages
%-----------------------------------------------------------------------------
-% the solver is free to choose at most one installed hash for each package
-{ attr("hash", node(ID, Package), Hash) : installed_hash(Package, Hash) } 1
-  :- attr("node", node(ID, Package)), internal_error("Package must resolve to at most one hash").

-#defined installed_hash/2.
+#defined installed_hash/2.
+#defined abi_splice_conditions_hold/4.

+% These are the previously concretized attributes of the installed package as
+% a hash. It has the general form:
+% hash_attr(Hash, Attribute, PackageName, Args*)
+#defined hash_attr/3.
+#defined hash_attr/4.
+#defined hash_attr/5.
+#defined hash_attr/6.
+#defined hash_attr/7.

+{ attr("hash", node(ID, PackageName), Hash): installed_hash(PackageName, Hash) } 1 :-
+  attr("node", node(ID, PackageName)),
+  internal_error("Package must resolve to at most 1 hash").
% you can't choose an installed hash for a dev spec
:- attr("hash", PackageNode, Hash), attr("variant_value", PackageNode, "dev_path", _).

% You can't install a hash, if it is not installed
:- attr("hash", node(ID, Package), Hash), not installed_hash(Package, Hash).
% This should be redundant given the constraint above
:- attr("node", PackageNode), 2 { attr("hash", PackageNode, Hash) }.

-% if a hash is selected, we impose all the constraints that implies
-impose(Hash, PackageNode) :- attr("hash", PackageNode, Hash).
+% hash_attrs are versions, but can_splice_attr are usually node_version_satisfies
+hash_attr(Hash, "node_version_satisfies", PackageName, Constraint) :-
+  hash_attr(Hash, "version", PackageName, Version),
+  pkg_fact(PackageName, version_satisfies(Constraint, Version)).

+% This recovers the exact semantics for hash reuse hash and depends_on are where
+% splices are decided, and virtual_on_edge can result in name-changes, which is
+% why they are all treated separately.
+imposed_constraint(Hash, Attr, PackageName) :-
+  hash_attr(Hash, Attr, PackageName).
+imposed_constraint(Hash, Attr, PackageName, A1) :-
+  hash_attr(Hash, Attr, PackageName, A1), Attr != "hash".
+imposed_constraint(Hash, Attr, PackageName, Arg1, Arg2) :-
+  hash_attr(Hash, Attr, PackageName, Arg1, Arg2),
+  Attr != "depends_on",
+  Attr != "virtual_on_edge".
+imposed_constraint(Hash, Attr, PackageName, A1, A2, A3) :-
+  hash_attr(Hash, Attr, PackageName, A1, A2, A3).
+imposed_constraint(Hash, "hash", PackageName, Hash) :- installed_hash(PackageName, Hash).
+% Without splicing, we simply recover the exact semantics
+imposed_constraint(ParentHash, "hash", ChildName, ChildHash) :-
+  hash_attr(ParentHash, "hash", ChildName, ChildHash),
+  ChildHash != ParentHash,
+  not abi_splice_conditions_hold(_, _, ChildName, ChildHash).

+imposed_constraint(Hash, "depends_on", PackageName, DepName, Type) :-
+  hash_attr(Hash, "depends_on", PackageName, DepName, Type),
+  hash_attr(Hash, "hash", DepName, DepHash),
+  not attr("splice_at_hash", _, _, DepName, DepHash).

+imposed_constraint(Hash, "virtual_on_edge", PackageName, DepName, VirtName) :-
+  hash_attr(Hash, "virtual_on_edge", PackageName, DepName, VirtName),
+  not attr("splice_at_hash", _, _, DepName,_).

+% Rules pertaining to attr("splice_at_hash") and abi_splice_conditions_hold will
+% be conditionally loaded from splices.lp

+impose(Hash, PackageNode) :- attr("hash", PackageNode, Hash), attr("node", PackageNode).

-% If there is not a hash for a package, we build it.
-build(PackageNode) :- attr("node", PackageNode), not concrete(PackageNode).

+% if we haven't selected a hash for a package, we'll be building it
+build(PackageNode) :- not attr("hash", PackageNode, _), attr("node", PackageNode).

% Minimizing builds is tricky. We want a minimizing criterion

@@ -1480,6 +1526,7 @@ build(PackageNode) :- not attr("hash", PackageNode, _), attr("node", PackageNode
% criteria for built specs -- so that they take precedence over the otherwise
% topmost-priority criterion to reuse what is installed.
%

% The priority ranges are:
%  1000+        Optimizations for concretization errors
%  300 - 1000   Highest priority optimizations for valid solutions
@@ -1505,12 +1552,10 @@ build_priority(PackageNode, 0) :- not build(PackageNode), attr("node", Package
  pkg_fact(Package, version_declared(Version, Weight, "installed")),
  not optimize_for_reuse().

-#defined installed_hash/2.

% This statement, which is a hidden feature of clingo, let us avoid cycles in the DAG
#edge (A, B) : depends_on(A, B).


%-----------------------------------------------------------------
% Optimization to avoid errors
%-----------------------------------------------------------------
@@ -44,6 +44,17 @@ def _id(thing: Any) -> Union[str, AspObject]:
|
||||
return f'"{str(thing)}"'
|
||||
|
||||
|
||||
class AspVar(AspObject):
|
||||
"""Represents a variable in an ASP rule, allows for conditionally generating
|
||||
rules"""
|
||||
|
||||
def __init__(self, name: str):
|
||||
self.name = name
|
||||
|
||||
def __str__(self) -> str:
|
||||
return str(self.name)
|
||||
|
||||
|
||||
@lang.key_ordering
|
||||
class AspFunction(AspObject):
|
||||
"""A term in the ASP logic program"""
|
||||
@@ -88,6 +99,8 @@ def _argify(self, arg: Any) -> Any:
            return clingo().Number(arg)
        elif isinstance(arg, AspFunction):
            return clingo().Function(arg.name, [self._argify(x) for x in arg.args], positive=True)
        elif isinstance(arg, AspVar):
            return clingo().Variable(arg.name)
        return clingo().String(str(arg))

    def symbol(self):
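For reference, a sketch of the full _argify dispatch after this change (clingo() is the accessor this module already uses; the mapping is inferred from the branches above):

    # int           -> clingo().Number(arg)
    # AspFunction   -> clingo().Function(arg.name, argified args, positive=True)
    # AspVar        -> clingo().Variable(arg.name)   (new in this diff)
    # anything else -> clingo().String(str(arg))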
@@ -15,7 +15,6 @@
#show attr/4.
#show attr/5.
#show attr/6.

% names of optimization criteria
#show opt_criterion/2.
lib/spack/spack/solver/splices.lp (new file, 56 lines)
@@ -0,0 +1,56 @@
% Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

%=============================================================================
% These rules are conditionally loaded to handle the synthesis of spliced
% packages.
%=============================================================================
% Consider the concrete spec:
%   foo@2.72%gcc@11.4 arch=linux-ubuntu22.04-icelake build_system=autotools ^bar ...
% It will emit the following facts for reuse (below is a subset)
%   installed_hash("foo", "xxxyyy")
%   hash_attr("xxxyyy", "hash", "foo", "xxxyyy")
%   hash_attr("xxxyyy", "version", "foo", "2.72")
%   hash_attr("xxxyyy", "node_os", "foo", "ubuntu22.04")
%   hash_attr("xxxyyy", "hash", "bar", "zzzqqq")
%   hash_attr("xxxyyy", "depends_on", "foo", "bar", "link")
% Rules that derive abi_splice_conditions_hold will be generated from
% use of the `can_splice` directive. They will have the following form:
%   can_splice("foo@1.0.0+a", when="@1.0.1+a", match_variants=["b"]) --->
%   abi_splice_conditions_hold(0, node(SID, "foo"), "foo", BaseHash) :-
%     installed_hash("foo", BaseHash),
%     attr("node", node(SID, SpliceName)),
%     attr("node_version_satisfies", node(SID, "foo"), "1.0.1"),
%     hash_attr("hash", "node_version_satisfies", "foo", "1.0.1"),
%     attr("variant_value", node(SID, "foo"), "a", "True"),
%     hash_attr("hash", "variant_value", "foo", "a", "True"),
%     attr("variant_value", node(SID, "foo"), "b", VariVar0),
%     hash_attr("hash", "variant_value", "foo", "b", VariVar0),
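For context, `can_splice` is a package-level directive; below is a hypothetical recipe using the exact call from the comment above (the package, its versions, and its variants are illustrative only, not from this diff):

    from spack.package import *

    class Foo(Package):
        version("1.0.1")
        version("1.0.0")
        variant("a", default=True, description="illustrative variant")
        variant("b", default=False, description="must match for the splice")
        # declares that foo@1.0.1+a may be spliced into a DAG in place of an
        # installed foo@1.0.0+a, provided the value of variant "b" also matches
        can_splice("foo@1.0.0+a", when="@1.0.1+a", match_variants=["b"])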
% If the splice is valid (i.e. abi_splice_conditions_hold is derived) for a
% dependency of a concrete spec, the solver is free to choose whether to keep
% the exact hash semantics by simply imposing the child hash, or to introduce
% a spliced node as the dependency instead
{ imposed_constraint(ParentHash, "hash", ChildName, ChildHash) } :-
    hash_attr(ParentHash, "hash", ChildName, ChildHash),
    abi_splice_conditions_hold(_, node(SID, SpliceName), ChildName, ChildHash).

attr("splice_at_hash", ParentNode, node(SID, SpliceName), ChildName, ChildHash) :-
    attr("hash", ParentNode, ParentHash),
    hash_attr(ParentHash, "hash", ChildName, ChildHash),
    abi_splice_conditions_hold(_, node(SID, SpliceName), ChildName, ChildHash),
    ParentHash != ChildHash,
    not imposed_constraint(ParentHash, "hash", ChildName, ChildHash).

% Names and virtual providers may change when a dependency is spliced in
imposed_constraint(Hash, "dependency_holds", ParentName, SpliceName, Type) :-
    hash_attr(Hash, "depends_on", ParentName, DepName, Type),
    hash_attr(Hash, "hash", DepName, DepHash),
    attr("splice_at_hash", node(ID, ParentName), node(SID, SpliceName), DepName, DepHash).

imposed_constraint(Hash, "virtual_on_edge", ParentName, SpliceName, VirtName) :-
    hash_attr(Hash, "virtual_on_edge", ParentName, DepName, VirtName),
    attr("splice_at_hash", node(ID, ParentName), node(SID, SpliceName), DepName, DepHash).
@@ -59,7 +59,7 @@
import re
import socket
import warnings
from typing import Any, Callable, Dict, List, Match, Optional, Set, Tuple, Union
from typing import Any, Callable, Dict, Iterable, List, Match, Optional, Set, Tuple, Union

import archspec.cpu
@@ -95,6 +95,8 @@
import spack.version as vn
import spack.version.git_ref_lookup

from .enums import InstallRecordStatus

__all__ = [
    "CompilerSpec",
    "Spec",
@@ -2071,7 +2073,7 @@ def _lookup_hash(self):
        # First env, then store, then binary cache
        matches = (
            (active_env.all_matching_specs(self) if active_env else [])
            or spack.store.STORE.db.query(self, installed=any)
            or spack.store.STORE.db.query(self, installed=InstallRecordStatus.ANY)
            or spack.binary_distribution.BinaryCacheQuery(True)(self)
        )
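The query sentinel changes from the Python builtin any to an explicit enum. A sketch of the shape InstallRecordStatus plausibly has (assumed; the real definition lives in spack/enums.py and is not part of this hunk, and the MISSING member in particular is an inference):

    from enum import Flag, auto

    class InstallRecordStatus(Flag):
        # member names inferred from their uses in this diff; MISSING is assumed
        INSTALLED = auto()
        DEPRECATED = auto()
        MISSING = auto()
        ANY = INSTALLED | DEPRECATED | MISSING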
@@ -2828,7 +2830,7 @@ def ensure_no_deprecated(root):
        msg += " For each package listed, choose another spec\n"
        raise SpecDeprecatedError(msg)

    def concretize(self, tests: Union[bool, List[str]] = False) -> None:
    def concretize(self, tests: Union[bool, Iterable[str]] = False) -> None:
        """Concretize the current spec.

        Args:
@@ -2907,7 +2909,7 @@ def _mark_concrete(self, value=True):
            if (not value) and s.concrete and s.installed:
                continue
            elif not value:
                s.clear_cached_hashes()
                s.clear_caches()
            s._mark_root_concrete(value)

    def _finalize_concretization(self):
@@ -2956,7 +2958,7 @@ def _finalize_concretization(self):
        for spec in self.traverse():
            spec._cached_hash(ht.dag_hash)

    def concretized(self, tests=False):
    def concretized(self, tests: Union[bool, Iterable[str]] = False) -> "spack.spec.Spec":
        """This is a non-destructive version of concretize().

        First clones, then returns a concrete version of this package
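With the widened annotation, any iterable of package names is accepted for tests, not only a list; a small usage sketch (the spec name is illustrative):

    s = Spec("mpileaks").concretized(tests=("mpileaks",))  # a tuple or set now type-checks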
@@ -4256,7 +4258,7 @@ def _splice_detach_and_add_dependents(self, replacement, context):
        for ancestor in ancestors_in_context:
            # Only set it if it hasn't been spliced before
            ancestor._build_spec = ancestor._build_spec or ancestor.copy()
            ancestor.clear_cached_hashes(ignore=(ht.package_hash.attr,))
            ancestor.clear_caches(ignore=(ht.package_hash.attr,))
            for edge in ancestor.edges_to_dependencies(depflag=dt.BUILD):
                if edge.depflag & ~dt.BUILD:
                    edge.depflag &= ~dt.BUILD
@@ -4450,7 +4452,7 @@ def mask_build_deps(in_spec):
        return spec

    def clear_cached_hashes(self, ignore=()):
    def clear_caches(self, ignore=()):
        """
        Clears all cached hashes in a Spec, while preserving other properties.
        """
@@ -4458,7 +4460,9 @@ def clear_cached_hashes(self, ignore=()):
            if h.attr not in ignore:
                if hasattr(self, h.attr):
                    setattr(self, h.attr, None)
        self._dunder_hash = None
        for attr in ("_dunder_hash", "_prefix"):
            if attr not in ignore:
                setattr(self, attr, None)

    def __hash__(self):
        # If the spec is concrete, we leverage the process hash and just use
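A sketch of the invariant the rename establishes (attribute names taken from the loop above): after clearing, both the memoized hashes and the cached prefix are gone unless explicitly ignored.

    spec.clear_caches()
    assert spec._dunder_hash is None
    assert spec._prefix is None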
@@ -16,6 +16,7 @@
import llnl.string
import llnl.util.lang
import llnl.util.symlink
import llnl.util.tty as tty
from llnl.util.filesystem import (
    can_access,
lib/spack/spack/test/abi_splicing.py (new file, 247 lines)
@@ -0,0 +1,247 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Test ABI-based splicing of dependencies"""

from typing import List

import pytest

import spack.config
import spack.deptypes as dt
import spack.solver.asp
from spack.installer import PackageInstaller
from spack.spec import Spec

class CacheManager:
    def __init__(self, specs: List[str]) -> None:
        self.req_specs = specs
        self.concr_specs: List[Spec]
        self.concr_specs = []

    def __enter__(self):
        self.concr_specs = [Spec(s).concretized() for s in self.req_specs]
        for s in self.concr_specs:
            PackageInstaller([s.package], fake=True, explicit=True).install()

    def __exit__(self, exc_type, exc_val, exc_tb):
        for s in self.concr_specs:
            s.package.do_uninstall()

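CacheManager is the backbone of every test below; a minimal usage sketch using specs from this file:

    # Everything inside the block sees these specs as (fake-)installed,
    # so the concretizer can reuse or splice them.
    with CacheManager(["splice-z@1.0.0+compat", "splice-h@1.0.2+compat"]):
        concrete = Spec("splice-h").concretized()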
# MacOS and Windows only work if you pass this function pointer rather than a
# closure
def _mock_has_runtime_dependencies(_x):
    return True


def _make_specs_non_buildable(specs: List[str]):
    output_config = {}
    for spec in specs:
        output_config[spec] = {"buildable": False}
    return output_config

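The helper simply builds the packages-config fragment that forces reuse; for example:

    _make_specs_non_buildable(["splice-t", "splice-h"])
    # -> {"splice-t": {"buildable": False}, "splice-h": {"buildable": False}}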
@pytest.fixture
def splicing_setup(mutable_database, mock_packages, monkeypatch):
    spack.config.set("concretizer:reuse", True)
    monkeypatch.setattr(
        spack.solver.asp, "_has_runtime_dependencies", _mock_has_runtime_dependencies
    )


def _enable_splicing():
    spack.config.set("concretizer:splice", {"automatic": True})


def _has_build_dependency(spec: Spec, name: str):
    return any(s.name == name for s in spec.dependencies(None, dt.BUILD))

def test_simple_reuse(splicing_setup):
    with CacheManager(["splice-z@1.0.0+compat"]):
        spack.config.set("packages", _make_specs_non_buildable(["splice-z"]))
        assert Spec("splice-z").concretized().satisfies(Spec("splice-z"))


def test_simple_dep_reuse(splicing_setup):
    with CacheManager(["splice-z@1.0.0+compat"]):
        spack.config.set("packages", _make_specs_non_buildable(["splice-z"]))
        assert Spec("splice-h@1").concretized().satisfies(Spec("splice-h@1"))

def test_splice_installed_hash(splicing_setup):
    cache = [
        "splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0",
        "splice-h@1.0.2+compat ^splice-z@1.0.0",
    ]
    with CacheManager(cache):
        packages_config = _make_specs_non_buildable(["splice-t", "splice-h"])
        spack.config.set("packages", packages_config)
        goal_spec = Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.0")
        with pytest.raises(Exception):
            goal_spec.concretized()
        _enable_splicing()
        assert goal_spec.concretized().satisfies(goal_spec)

def test_splice_build_splice_node(splicing_setup):
    with CacheManager(["splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0+compat"]):
        spack.config.set("packages", _make_specs_non_buildable(["splice-t"]))
        goal_spec = Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.0+compat")
        with pytest.raises(Exception):
            goal_spec.concretized()
        _enable_splicing()
        assert goal_spec.concretized().satisfies(goal_spec)

def test_double_splice(splicing_setup):
    cache = [
        "splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0+compat",
        "splice-h@1.0.2+compat ^splice-z@1.0.1+compat",
        "splice-z@1.0.2+compat",
    ]
    with CacheManager(cache):
        freeze_builds_config = _make_specs_non_buildable(["splice-t", "splice-h", "splice-z"])
        spack.config.set("packages", freeze_builds_config)
        goal_spec = Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.2+compat")
        with pytest.raises(Exception):
            goal_spec.concretized()
        _enable_splicing()
        assert goal_spec.concretized().satisfies(goal_spec)

# The next two tests are mirrors of one another
def test_virtual_multi_splices_in(splicing_setup):
    cache = [
        "depends-on-virtual-with-abi ^virtual-abi-1",
        "depends-on-virtual-with-abi ^virtual-abi-2",
    ]
    goal_specs = [
        "depends-on-virtual-with-abi ^virtual-abi-multi abi=one",
        "depends-on-virtual-with-abi ^virtual-abi-multi abi=two",
    ]
    with CacheManager(cache):
        spack.config.set("packages", _make_specs_non_buildable(["depends-on-virtual-with-abi"]))
        for gs in goal_specs:
            with pytest.raises(Exception):
                Spec(gs).concretized()
        _enable_splicing()
        for gs in goal_specs:
            assert Spec(gs).concretized().satisfies(gs)

def test_virtual_multi_can_be_spliced(splicing_setup):
    cache = [
        "depends-on-virtual-with-abi ^virtual-abi-multi abi=one",
        "depends-on-virtual-with-abi ^virtual-abi-multi abi=two",
    ]
    goal_specs = [
        "depends-on-virtual-with-abi ^virtual-abi-1",
        "depends-on-virtual-with-abi ^virtual-abi-2",
    ]
    with CacheManager(cache):
        spack.config.set("packages", _make_specs_non_buildable(["depends-on-virtual-with-abi"]))
        for gs in goal_specs:
            with pytest.raises(Exception):
                Spec(gs).concretized()
        _enable_splicing()
        for gs in goal_specs:
            assert Spec(gs).concretized().satisfies(gs)

def test_manyvariant_star_matching_variant_splice(splicing_setup):
    cache = [
        # can_splice("manyvariants@1.0.0", when="@1.0.1", match_variants="*")
        "depends-on-manyvariants ^manyvariants@1.0.0+a+b c=v1 d=v2",
        "depends-on-manyvariants ^manyvariants@1.0.0~a~b c=v3 d=v3",
    ]
    goal_specs = [
        Spec("depends-on-manyvariants ^manyvariants@1.0.1+a+b c=v1 d=v2"),
        Spec("depends-on-manyvariants ^manyvariants@1.0.1~a~b c=v3 d=v3"),
    ]
    with CacheManager(cache):
        freeze_build_config = {"depends-on-manyvariants": {"buildable": False}}
        spack.config.set("packages", freeze_build_config)
        for goal in goal_specs:
            with pytest.raises(Exception):
                goal.concretized()
        _enable_splicing()
        for goal in goal_specs:
            assert goal.concretized().satisfies(goal)

def test_manyvariant_limited_matching(splicing_setup):
    cache = [
        # can_splice("manyvariants@2.0.0+a~b", when="@2.0.1~a+b", match_variants=["c", "d"])
        "depends-on-manyvariants@2.0 ^manyvariants@2.0.0+a~b c=v3 d=v2",
        # can_splice("manyvariants@2.0.0 c=v1 d=v1", when="@2.0.1+a+b")
        "depends-on-manyvariants@2.0 ^manyvariants@2.0.0~a~b c=v1 d=v1",
    ]
    goal_specs = [
        Spec("depends-on-manyvariants@2.0 ^manyvariants@2.0.1~a+b c=v3 d=v2"),
        Spec("depends-on-manyvariants@2.0 ^manyvariants@2.0.1+a+b c=v3 d=v3"),
    ]
    with CacheManager(cache):
        freeze_build_config = {"depends-on-manyvariants": {"buildable": False}}
        spack.config.set("packages", freeze_build_config)
        for s in goal_specs:
            with pytest.raises(Exception):
                s.concretized()
        _enable_splicing()
        for s in goal_specs:
            assert s.concretized().satisfies(s)

def test_external_splice_same_name(splicing_setup):
    cache = [
        "splice-h@1.0.0 ^splice-z@1.0.0+compat",
        "splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.1+compat",
    ]
    packages_yaml = {
        "splice-z": {"externals": [{"spec": "splice-z@1.0.2+compat", "prefix": "/usr"}]}
    }
    goal_specs = [
        Spec("splice-h@1.0.0 ^splice-z@1.0.2"),
        Spec("splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.2"),
    ]
    with CacheManager(cache):
        spack.config.set("packages", packages_yaml)
        _enable_splicing()
        for s in goal_specs:
            assert s.concretized().satisfies(s)

def test_spliced_build_deps_only_in_build_spec(splicing_setup):
    cache = ["splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.0"]
    goal_spec = Spec("splice-t@1.0 ^splice-h@1.0.2 ^splice-z@1.0.0")

    with CacheManager(cache):
        _enable_splicing()
        concr_goal = goal_spec.concretized()
        build_spec = concr_goal._build_spec
        # Spec has been spliced
        assert build_spec is not None
        # Build spec has spliced build dependencies
        assert _has_build_dependency(build_spec, "splice-h")
        assert _has_build_dependency(build_spec, "splice-z")
        # Spliced build dependencies are removed
        assert len(concr_goal.dependencies(None, dt.BUILD)) == 0

def test_spliced_transitive_dependency(splicing_setup):
    cache = ["splice-depends-on-t@1.0 ^splice-h@1.0.1"]
    goal_spec = Spec("splice-depends-on-t^splice-h@1.0.2")

    with CacheManager(cache):
        spack.config.set("packages", _make_specs_non_buildable(["splice-depends-on-t"]))
        _enable_splicing()
        concr_goal = goal_spec.concretized()
        # Spec has been spliced
        assert concr_goal._build_spec is not None
        assert concr_goal["splice-t"]._build_spec is not None
        assert concr_goal.satisfies(goal_spec)
        # Spliced build dependencies are removed
        assert len(concr_goal.dependencies(None, dt.BUILD)) == 0
@@ -15,6 +15,7 @@
import spack.build_systems.autotools
import spack.build_systems.cmake
import spack.builder
import spack.environment
import spack.error
import spack.paths
@@ -149,7 +150,7 @@ def test_libtool_archive_files_are_deleted_by_default(self, mutable_database):
        # Assert the libtool archive is not there and we have
        # a log of removed files
        assert not os.path.exists(s.package.builder.libtool_archive_file)
        assert not os.path.exists(spack.builder.create(s.package).libtool_archive_file)
        search_directory = os.path.join(s.prefix, ".spack")
        libtool_deletion_log = fs.find(search_directory, "removed_la_files.txt", recursive=True)
        assert libtool_deletion_log
@@ -160,11 +161,13 @@ def test_libtool_archive_files_might_be_installed_on_demand(
        # Install a package that creates a mock libtool archive,
        # patch its package to preserve the installation
        s = Spec("libtool-deletion").concretized()
        monkeypatch.setattr(type(s.package.builder), "install_libtool_archives", True)
        monkeypatch.setattr(
            type(spack.builder.create(s.package)), "install_libtool_archives", True
        )
        PackageInstaller([s.package], explicit=True).install()

        # Assert libtool archives are installed
        assert os.path.exists(s.package.builder.libtool_archive_file)
        assert os.path.exists(spack.builder.create(s.package).libtool_archive_file)

    def test_autotools_gnuconfig_replacement(self, mutable_database):
        """
@@ -261,7 +264,7 @@ def test_cmake_std_args(self, default_mock_concretization):
        # Call the function on a CMakePackage instance
        s = default_mock_concretization("cmake-client")
        expected = spack.build_systems.cmake.CMakeBuilder.std_args(s.package)
        assert s.package.builder.std_cmake_args == expected
        assert spack.builder.create(s.package).std_cmake_args == expected

        # Call it on another kind of package
        s = default_mock_concretization("mpich")
@@ -381,7 +384,9 @@ def test_autotools_args_from_conditional_variant(default_mock_concretization):
    is not met. When this is the case, the variant is not set in the spec."""
    s = default_mock_concretization("autotools-conditional-variants-test")
    assert "example" not in s.variants
    assert len(s.package.builder._activate_or_not("example", "enable", "disable")) == 0
    assert (
        len(spack.builder.create(s.package)._activate_or_not("example", "enable", "disable")) == 0
    )


def test_autoreconf_search_path_args_multiple(default_mock_concretization, tmpdir):
@@ -17,6 +17,7 @@
import spack
import spack.binary_distribution
import spack.ci as ci
import spack.cmd
import spack.cmd.ci
import spack.environment as ev
import spack.hash_types as ht
@@ -7,7 +7,7 @@
import spack.spec
import spack.store
from spack.database import InstallStatuses
from spack.enums import InstallRecordStatus
from spack.main import SpackCommand

install = SpackCommand("install")
@@ -26,7 +26,7 @@ def test_deprecate(mock_packages, mock_archive, mock_fetch, install_mockery):
    deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")

    non_deprecated = spack.store.STORE.db.query()
    all_available = spack.store.STORE.db.query(installed=any)
    all_available = spack.store.STORE.db.query(installed=InstallRecordStatus.ANY)
    assert all_available == all_installed
    assert non_deprecated == spack.store.STORE.db.query("libelf@0.8.13")
@@ -56,7 +56,7 @@ def test_deprecate_install(mock_packages, mock_archive, mock_fetch, install_mock
    deprecate("-y", "-i", "libelf@0.8.10", "libelf@0.8.13")

    non_deprecated = spack.store.STORE.db.query()
    deprecated = spack.store.STORE.db.query(installed=InstallStatuses.DEPRECATED)
    deprecated = spack.store.STORE.db.query(installed=InstallRecordStatus.DEPRECATED)
    assert deprecated == to_deprecate
    assert len(non_deprecated) == 1
    assert non_deprecated[0].satisfies("libelf@0.8.13")
@@ -75,8 +75,8 @@ def test_deprecate_deps(mock_packages, mock_archive, mock_fetch, install_mockery
    deprecate("-y", "-d", "libdwarf@20130207", "libdwarf@20130729")

    non_deprecated = spack.store.STORE.db.query()
    all_available = spack.store.STORE.db.query(installed=any)
    deprecated = spack.store.STORE.db.query(installed=InstallStatuses.DEPRECATED)
    all_available = spack.store.STORE.db.query(installed=InstallRecordStatus.ANY)
    deprecated = spack.store.STORE.db.query(installed=InstallRecordStatus.DEPRECATED)

    assert all_available == all_installed
    assert sorted(all_available) == sorted(deprecated + non_deprecated)
@@ -96,7 +96,9 @@ def test_uninstall_deprecated(mock_packages, mock_archive, mock_fetch, install_m
    uninstall("-y", "libelf@0.8.10")

    assert spack.store.STORE.db.query() == spack.store.STORE.db.query(installed=any)
    assert spack.store.STORE.db.query() == spack.store.STORE.db.query(
        installed=InstallRecordStatus.ANY
    )
    assert spack.store.STORE.db.query() == non_deprecated

@@ -116,7 +118,7 @@ def test_deprecate_already_deprecated(mock_packages, mock_archive, mock_fetch, i
    deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")

    non_deprecated = spack.store.STORE.db.query()
    all_available = spack.store.STORE.db.query(installed=any)
    all_available = spack.store.STORE.db.query(installed=InstallRecordStatus.ANY)
    assert len(non_deprecated) == 2
    assert len(all_available) == 3
@@ -143,7 +145,7 @@ def test_deprecate_deprecator(mock_packages, mock_archive, mock_fetch, install_m
    deprecate("-y", "libelf@0.8.12", "libelf@0.8.13")

    non_deprecated = spack.store.STORE.db.query()
    all_available = spack.store.STORE.db.query(installed=any)
    all_available = spack.store.STORE.db.query(installed=InstallRecordStatus.ANY)
    assert len(non_deprecated) == 1
    assert len(all_available) == 3

@@ -17,6 +17,7 @@
import spack.repo
import spack.store
import spack.user_environment as uenv
from spack.enums import InstallRecordStatus
from spack.main import SpackCommand
from spack.spec import Spec
from spack.test.conftest import create_test_repo
@@ -75,7 +76,7 @@ def test_query_arguments():
    assert "installed" in q_args
    assert "predicate_fn" in q_args
    assert "explicit" in q_args
    assert q_args["installed"] == ["installed"]
    assert q_args["installed"] == InstallRecordStatus.INSTALLED
    assert q_args["predicate_fn"] is None
    assert q_args["explicit"] is None
    assert "start_date" in q_args
@@ -4,10 +4,17 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest

import spack.config
import spack.environment as ev
import spack.error
import spack.solver.asp as asp
import spack.store
from spack.cmd import (
    CommandNameError,
    PythonNameError,
    cmd_name,
    matching_specs_from_env,
    parse_specs,
    python_name,
    require_cmd_name,
    require_python_name,
@@ -34,3 +41,99 @@ def test_require_cmd_name():
    with pytest.raises(CommandNameError):
        require_cmd_name("okey_dokey")
    require_cmd_name(cmd_name("okey_dokey"))


@pytest.mark.parametrize(
    "unify,spec_strs,error",
    [
        # single spec
        (True, ["zmpi"], None),
        (False, ["mpileaks"], None),
        # multiple specs, some from hash some from file
        (True, ["zmpi", "mpileaks^zmpi", "libelf"], None),
        (True, ["mpileaks^zmpi", "mpileaks^mpich", "libelf"], spack.error.SpecError),
        (False, ["mpileaks^zmpi", "mpileaks^mpich", "libelf"], None),
    ],
)
def test_special_cases_concretization_parse_specs(
    unify, spec_strs, error, monkeypatch, mutable_config, mutable_database, tmpdir
):
    """Test that special cases in parse_specs(concretize=True) bypass solver"""

    # monkeypatch to ensure we do not call the actual concretizer
    def _fail(*args, **kwargs):
        assert False

    monkeypatch.setattr(asp.SpackSolverSetup, "setup", _fail)

    spack.config.set("concretizer:unify", unify)

    args = [f"/{spack.store.STORE.db.query(s)[0].dag_hash()}" for s in spec_strs]
    if len(args) > 1:
        # We convert the last one to a specfile input
        filename = tmpdir.join("spec.json")
        spec = parse_specs(args[-1], concretize=True)[0]
        with open(filename, "w") as f:
            spec.to_json(f)
        args[-1] = str(filename)

    if error:
        with pytest.raises(error):
            parse_specs(args, concretize=True)
    else:
        # assertion error from monkeypatch above if test fails
        parse_specs(args, concretize=True)


@pytest.mark.parametrize(
    "unify,spec_strs,error",
    [
        # single spec
        (True, ["zmpi"], None),
        (False, ["mpileaks"], None),
        # multiple specs, some from hash some from file
        (True, ["zmpi", "mpileaks^zmpi", "libelf"], None),
        (True, ["mpileaks^zmpi", "mpileaks^mpich", "libelf"], spack.error.SpecError),
        (False, ["mpileaks^zmpi", "mpileaks^mpich", "libelf"], None),
    ],
)
def test_special_cases_concretization_matching_specs_from_env(
    unify,
    spec_strs,
    error,
    monkeypatch,
    mutable_config,
    mutable_database,
    tmpdir,
    mutable_mock_env_path,
):
    """Test that special cases in parse_specs(concretize=True) bypass solver"""

    # monkeypatch to ensure we do not call the actual concretizer
    def _fail(*args, **kwargs):
        assert False

    monkeypatch.setattr(asp.SpackSolverSetup, "setup", _fail)

    spack.config.set("concretizer:unify", unify)

    ev.create("test")
    env = ev.read("test")

    args = [f"/{spack.store.STORE.db.query(s)[0].dag_hash()}" for s in spec_strs]
    if len(args) > 1:
        # We convert the last one to a specfile input
        filename = tmpdir.join("spec.json")
        spec = parse_specs(args[-1], concretize=True)[0]
        with open(filename, "w") as f:
            spec.to_json(f)
        args[-1] = str(filename)

    with env:
        specs = parse_specs(args, concretize=False)
        if error:
            with pytest.raises(error):
                matching_specs_from_env(specs)
        else:
            # assertion error from monkeypatch above if test fails
            matching_specs_from_env(specs)
@@ -10,6 +10,7 @@
from llnl.util.filesystem import mkdirp, working_dir

import spack.cmd
import spack.cmd.pkg
import spack.main
import spack.paths
@@ -311,7 +312,20 @@ def test_pkg_grep(mock_packages, capfd):
    output, _ = capfd.readouterr()
    assert output.strip() == "\n".join(
        spack.repo.PATH.get_pkg_class(name).module.__file__
        for name in ["splice-a", "splice-h", "splice-t", "splice-vh", "splice-vt", "splice-z"]
        for name in [
            "depends-on-manyvariants",
            "manyvariants",
            "splice-a",
            "splice-depends-on-t",
            "splice-h",
            "splice-t",
            "splice-vh",
            "splice-vt",
            "splice-z",
            "virtual-abi-1",
            "virtual-abi-2",
            "virtual-abi-multi",
        ]
    )

    # ensure that this string isn't found
@@ -6,6 +6,7 @@
import spack.store
from spack.database import Database
from spack.enums import InstallRecordStatus
from spack.main import SpackCommand

install = SpackCommand("install")
@@ -57,18 +58,18 @@ def test_reindex_with_deprecated_packages(
    db = spack.store.STORE.db

    all_installed = db.query(installed=any)
    all_installed = db.query(installed=InstallRecordStatus.ANY)
    non_deprecated = db.query(installed=True)

    _clear_db(tmp_path)

    reindex()

    assert db.query(installed=any) == all_installed
    assert db.query(installed=InstallRecordStatus.ANY) == all_installed
    assert db.query(installed=True) == non_deprecated

    old_libelf = db.query_local_by_spec_hash(
        db.query_local("libelf@0.8.12", installed=any)[0].dag_hash()
        db.query_local("libelf@0.8.12", installed=InstallRecordStatus.ANY)[0].dag_hash()
    )
    new_libelf = db.query_local_by_spec_hash(
        db.query_local("libelf@0.8.13", installed=True)[0].dag_hash()
@@ -7,6 +7,7 @@
import pytest

import spack.config
import spack.environment as ev
import spack.error
import spack.spec
@@ -179,3 +180,43 @@ def test_spec_version_assigned_git_ref_as_version(name, version, error):
    else:
        output = spec(name + "@" + version)
        assert version in output


@pytest.mark.parametrize(
    "unify, spec_hash_args, match, error",
    [
        # success cases with unify:true
        (True, ["mpileaks_mpich"], "mpich", None),
        (True, ["mpileaks_zmpi"], "zmpi", None),
        (True, ["mpileaks_mpich", "dyninst"], "mpich", None),
        (True, ["mpileaks_zmpi", "dyninst"], "zmpi", None),
        # same success cases with unify:false
        (False, ["mpileaks_mpich"], "mpich", None),
        (False, ["mpileaks_zmpi"], "zmpi", None),
        (False, ["mpileaks_mpich", "dyninst"], "mpich", None),
        (False, ["mpileaks_zmpi", "dyninst"], "zmpi", None),
        # cases where unify:true errors out and unify:false does not
        (True, ["mpileaks_mpich", "mpileaks_zmpi"], "callpath, mpileaks", spack.error.SpecError),
        (False, ["mpileaks_mpich", "mpileaks_zmpi"], "zmpi", None),
    ],
)
def test_spec_unification_from_cli(
    install_mockery, mutable_config, mutable_database, unify, spec_hash_args, match, error
):
    """Ensure specs grouped together on the CLI are concretized together when unify:true."""
    spack.config.set("concretizer:unify", unify)

    db = spack.store.STORE.db
    spec_lookup = {
        "mpileaks_mpich": db.query_one("mpileaks ^mpich").dag_hash(),
        "mpileaks_zmpi": db.query_one("mpileaks ^zmpi").dag_hash(),
        "dyninst": db.query_one("dyninst").dag_hash(),
    }

    hashes = [f"/{spec_lookup[name]}" for name in spec_hash_args]
    if error:
        with pytest.raises(error, match=match):
            output = spec(*hashes)
    else:
        output = spec(*hashes)
        assert match in output
@@ -4,8 +4,11 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import filecmp
import io
import os
import pathlib
import shutil
import sys

import pytest

@@ -15,7 +18,7 @@
import spack.main
import spack.paths
import spack.repo
from spack.cmd.style import changed_files
from spack.cmd.style import _run_import_check, changed_files
from spack.util.executable import which

#: directory with sample style files
@@ -292,5 +295,114 @@ def test_style_with_black(flake8_package_with_errors):
def test_skip_tools():
    output = style("--skip", "isort,mypy,black,flake8")
    output = style("--skip", "import,isort,mypy,black,flake8")
    assert "Nothing to run" in output


@pytest.mark.skipif(sys.version_info < (3, 9), reason="requires Python 3.9+")
def test_run_import_check(tmp_path: pathlib.Path):
    file = tmp_path / "issues.py"
    contents = '''
import spack.cmd
import spack.config  # do not drop this import because of this comment

# this comment about spack.error should not be removed
class Example(spack.build_systems.autotools.AutotoolsPackage):
    """this is a docstring referencing unused spack.error.SpackError, which is fine"""
    pass

def foo(config: "spack.error.SpackError"):
    # the type hint is quoted, so it should not be removed
    spack.util.executable.Executable("example")
    print(spack.__version__)
'''
    file.write_text(contents)
    root = str(tmp_path)
    output_buf = io.StringIO()
    exit_code = _run_import_check(
        [str(file)],
        fix=False,
        out=output_buf,
        root_relative=False,
        root=spack.paths.prefix,
        working_dir=root,
    )
    output = output_buf.getvalue()

    assert "issues.py: redundant import: spack.cmd" in output
    assert "issues.py: redundant import: spack.config" not in output  # comment prevents removal
    assert "issues.py: missing import: spack" in output  # used by spack.__version__
    assert "issues.py: missing import: spack.build_systems.autotools" in output
    assert "issues.py: missing import: spack.util.executable" in output
    assert "issues.py: missing import: spack.error" not in output  # not directly used
    assert exit_code == 1
    assert file.read_text() == contents  # fix=False should not change the file

    # run it with --fix, should have the same output.
    output_buf = io.StringIO()
    exit_code = _run_import_check(
        [str(file)],
        fix=True,
        out=output_buf,
        root_relative=False,
        root=spack.paths.prefix,
        working_dir=root,
    )
    output = output_buf.getvalue()
    assert exit_code == 1
    assert "issues.py: redundant import: spack.cmd" in output
    assert "issues.py: missing import: spack" in output
    assert "issues.py: missing import: spack.build_systems.autotools" in output
    assert "issues.py: missing import: spack.util.executable" in output

    # after fix a second fix is idempotent
    output_buf = io.StringIO()
    exit_code = _run_import_check(
        [str(file)],
        fix=True,
        out=output_buf,
        root_relative=False,
        root=spack.paths.prefix,
        working_dir=root,
    )
    output = output_buf.getvalue()
    assert exit_code == 0
    assert not output

    # check that the file was fixed
    new_contents = file.read_text()
    assert "import spack.cmd" not in new_contents
    assert "import spack\n" in new_contents
    assert "import spack.build_systems.autotools\n" in new_contents
    assert "import spack.util.executable\n" in new_contents


@pytest.mark.skipif(sys.version_info < (3, 9), reason="requires Python 3.9+")
def test_run_import_check_syntax_error_and_missing(tmp_path: pathlib.Path):
    (tmp_path / "syntax-error.py").write_text("""this 'is n(ot python code""")
    output_buf = io.StringIO()
    exit_code = _run_import_check(
        [str(tmp_path / "syntax-error.py"), str(tmp_path / "missing.py")],
        fix=False,
        out=output_buf,
        root_relative=True,
        root=str(tmp_path),
        working_dir=str(tmp_path / "does-not-matter"),
    )
    output = output_buf.getvalue()
    assert "syntax-error.py: could not parse" in output
    assert "missing.py: could not parse" in output
    assert exit_code == 1


def test_case_sensitive_imports(tmp_path: pathlib.Path):
    # example.Example is a name, while example.example is a module.
    (tmp_path / "lib" / "spack" / "example").mkdir(parents=True)
    (tmp_path / "lib" / "spack" / "example" / "__init__.py").write_text("class Example:\n pass")
    (tmp_path / "lib" / "spack" / "example" / "example.py").write_text("foo = 1")
    assert spack.cmd.style._module_part(str(tmp_path), "example.Example") == "example"


def test_pkg_imports():
    assert spack.cmd.style._module_part(spack.paths.prefix, "spack.pkg.builtin.boost") is None
    assert spack.cmd.style._module_part(spack.paths.prefix, "spack.pkg") is None
@@ -11,6 +11,7 @@
import spack.cmd.uninstall
import spack.environment
import spack.store
from spack.enums import InstallRecordStatus
from spack.main import SpackCommand, SpackCommandError

uninstall = SpackCommand("uninstall")
@@ -129,10 +130,10 @@ def validate_callpath_spec(installed):
    specs = spack.store.STORE.db.get_by_hash(dag_hash[:7], installed=installed)
    assert len(specs) == 1 and specs[0] == callpath_spec

    specs = spack.store.STORE.db.get_by_hash(dag_hash, installed=any)
    specs = spack.store.STORE.db.get_by_hash(dag_hash, installed=InstallRecordStatus.ANY)
    assert len(specs) == 1 and specs[0] == callpath_spec

    specs = spack.store.STORE.db.get_by_hash(dag_hash[:7], installed=any)
    specs = spack.store.STORE.db.get_by_hash(dag_hash[:7], installed=InstallRecordStatus.ANY)
    assert len(specs) == 1 and specs[0] == callpath_spec

    specs = spack.store.STORE.db.get_by_hash(dag_hash, installed=not installed)
@@ -147,7 +148,7 @@ def validate_callpath_spec(installed):
    spec = spack.store.STORE.db.query_one("callpath ^mpich", installed=installed)
    assert spec == callpath_spec

    spec = spack.store.STORE.db.query_one("callpath ^mpich", installed=any)
    spec = spack.store.STORE.db.query_one("callpath ^mpich", installed=InstallRecordStatus.ANY)
    assert spec == callpath_spec

    spec = spack.store.STORE.db.query_one("callpath ^mpich", installed=not installed)
@@ -537,22 +537,6 @@ def test_nvhpc_flags():
    supported_flag_test("stdcxx_libs", ("-c++libs",), "nvhpc@=20.9")


def test_pgi_flags():
    supported_flag_test("openmp_flag", "-mp", "pgi@=1.0")
    supported_flag_test("cxx11_flag", "-std=c++11", "pgi@=1.0")
    unsupported_flag_test("c99_flag", "pgi@=12.9")
    supported_flag_test("c99_flag", "-c99", "pgi@=12.10")
    unsupported_flag_test("c11_flag", "pgi@=15.2")
    supported_flag_test("c11_flag", "-c11", "pgi@=15.3")
    supported_flag_test("cc_pic_flag", "-fpic", "pgi@=1.0")
    supported_flag_test("cxx_pic_flag", "-fpic", "pgi@=1.0")
    supported_flag_test("f77_pic_flag", "-fpic", "pgi@=1.0")
    supported_flag_test("fc_pic_flag", "-fpic", "pgi@=1.0")
    supported_flag_test("stdcxx_libs", ("-pgc++libs",), "pgi@=1.0")
    supported_flag_test("debug_flags", ["-g", "-gopt"], "pgi@=1.0")
    supported_flag_test("opt_flags", ["-O", "-O0", "-O1", "-O2", "-O3", "-O4"], "pgi@=1.0")


def test_xl_flags():
    supported_flag_test("openmp_flag", "-qsmp=omp", "xl@=1.0")
    unsupported_flag_test("cxx11_flag", "xl@=13.0")
@@ -15,6 +15,7 @@
import llnl.util.tty as tty
from llnl.util.filesystem import join_path, touch, touchp

import spack
import spack.config
import spack.directory_layout
import spack.environment as ev
@@ -38,7 +38,7 @@
import spack.compiler
import spack.compilers
import spack.config
import spack.directives
import spack.directives_meta
import spack.environment as ev
import spack.error
import spack.modules.common
@@ -1754,7 +1754,7 @@ def clear_directive_functions():
    # Make sure any directive functions overridden by tests are cleared before
    # proceeding with subsequent tests that may depend on the original
    # functions.
    spack.directives.DirectiveMeta._directives_to_be_executed = []
    spack.directives_meta.DirectiveMeta._directives_to_be_executed = []


@pytest.fixture
@@ -1,11 +0,0 @@
Export PGI=/usr/tce/packages/pgi/pgi-16.3

/usr/tce/packages/pgi/pgi-16.3/linux86-64/16.3/bin/pgc test.c -opt 1 -x 119 0xa10000 -x 122 0x40 -x 123 0x1000 -x 127 4 -x 127 17 -x 19 0x400000 -x 28 0x40000 -x 120 0x10000000 -x 70 0x8000 -x 122 1 -x 125 0x20000 -quad -x 59 4 -tp haswell -x 120 0x1000 -astype 0 -stdinc /usr/tce/packages/pgi/pgi-16.3/linux86-64/16.3/include-gcc48:/usr/tce/packages/pgi/pgi-16.3/linux86-64/16.3/include:/usr/lib/gcc/x86_64-redhat-linux/4.8.5/include:/usr/local/include:/usr/include -def unix -def __unix -def __unix__ -def linux -def __linux -def __linux__ -def __NO_MATH_INLINES -def __LP64__ -def __x86_64 -def __x86_64__ -def __LONG_MAX__=9223372036854775807L -def '__SIZE_TYPE__=unsigned long int' -def '__PTRDIFF_TYPE__=long int' -def __THROW= -def __extension__= -def __amd_64__amd64__ -def __k8 -def __k8__ -def __SSE__ -def __MMX__ -def __SSE2__ -def __SSE3__ -def __SSSE3__ -def __STDC_HOSTED__ -predicate '#machine(x86_64) #lint(off) #system(posix) #cpu(x86_64)' -cmdline '+pgcc test.c -v -o test.o' -x 123 0x80000000 -x 123 4 -x 2 0x400 -x 119 0x20 -def __pgnu_vsn=40805 -x 120 0x200000 -x 70 0x40000000 -y 163 0xc0000000 -x 189 0x10 -y 189 0x4000000 -asm /var/tmp/gamblin2/pgccL0MCVCOQsq6l.s
PGC/x86-64 Linux 16.3-0: compilation successful

/usr/bin/as /var/tmp/gamblin2/pgccL0MCVCOQsq6l.s -o /var/tmp/gamblin2/pgcc10MCFxmYXjgo.o

/usr/tce/bin/ld /usr/lib64/crt1.o /usr/lib64/crti.o /usr/tce/packages/pgi/pgi-16.3/linux86-64/16.3/lib/trace_init.o /usr/lib/gcc/x86_64-redhat-linux/4.8.5/crtbegin.o /usr/tce/packages/pgi/pgi-16.3/linux86-64/16.3/lib/initmp.o --eh-frame-hdr -m elf_x86_64 -dynamic-linker /lib64/ld-linux-x86-64.so.2 /usr/tce/packages/pgi/pgi-16.3/linux86-64/16.3/lib/pgi.ld -L/usr/tce/packages/pgi/pgi-16.3/linux86-64/16.3/lib -L/usr/lib64 -L/usr/lib/gcc/x86_64-redhat-linux/4.8.5 /var/tmp/gamblin2/pgcc10MCFxmYXjgo.o -rpath /usr/tce/packages/pgi/pgi-16.3/linux86-64/16.3/lib -o test.o -lpgmp -lnuma -lpthread -lnspgc -lpgc -lm -lgcc -lc -lgcc /usr/lib/gcc/x86_64-redhat-linux/4.8.5/crtend.o /usr/lib64/crtn.o
Unlinking /var/tmp/gamblin2/pgccL0MCVCOQsq6l.s
Unlinking /var/tmp/gamblin2/pgccn0MCNcmgIbh8.ll
Unlinking /var/tmp/gamblin2/pgcc10MCFxmYXjgo.o
@@ -34,6 +34,7 @@
import spack.spec
import spack.store
import spack.version as vn
from spack.enums import InstallRecordStatus
from spack.installer import PackageInstaller
from spack.schema.database_index import schema
from spack.util.executable import Executable
@@ -292,7 +293,7 @@ def _print_ref_counts():
    recs = []

    def add_rec(spec):
        cspecs = spack.store.STORE.db.query(spec, installed=any)
        cspecs = spack.store.STORE.db.query(spec, installed=InstallRecordStatus.ANY)

        if not cspecs:
            recs.append("[ %-7s ] %-20s-" % ("", spec))
@@ -324,7 +325,7 @@ def add_rec(spec):
def _check_merkleiness():
    """Ensure the spack database is a valid merkle graph."""
    all_specs = spack.store.STORE.db.query(installed=any)
    all_specs = spack.store.STORE.db.query(installed=InstallRecordStatus.ANY)

    seen = {}
    for spec in all_specs:
@@ -617,7 +618,7 @@ def test_080_root_ref_counts(mutable_database):
    mutable_database.remove("mpileaks ^mpich")

    # record no longer in DB
    assert mutable_database.query("mpileaks ^mpich", installed=any) == []
    assert mutable_database.query("mpileaks ^mpich", installed=InstallRecordStatus.ANY) == []

    # record's deps have updated ref_counts
    assert mutable_database.get_record("callpath ^mpich").ref_count == 0
@@ -627,7 +628,7 @@ def test_080_root_ref_counts(mutable_database):
    mutable_database.add(rec.spec)

    # record is present again
    assert len(mutable_database.query("mpileaks ^mpich", installed=any)) == 1
    assert len(mutable_database.query("mpileaks ^mpich", installed=InstallRecordStatus.ANY)) == 1

    # dependencies have ref counts updated
    assert mutable_database.get_record("callpath ^mpich").ref_count == 1
@@ -643,18 +644,21 @@ def test_090_non_root_ref_counts(mutable_database):
    # record still in DB but marked uninstalled
    assert mutable_database.query("callpath ^mpich", installed=True) == []
    assert len(mutable_database.query("callpath ^mpich", installed=any)) == 1
    assert len(mutable_database.query("callpath ^mpich", installed=InstallRecordStatus.ANY)) == 1

    # record and its deps have same ref_counts
    assert mutable_database.get_record("callpath ^mpich", installed=any).ref_count == 1
    assert (
        mutable_database.get_record("callpath ^mpich", installed=InstallRecordStatus.ANY).ref_count
        == 1
    )
    assert mutable_database.get_record("mpich").ref_count == 2

    # remove only dependent of uninstalled callpath record
    mutable_database.remove("mpileaks ^mpich")

    # record and parent are completely gone.
    assert mutable_database.query("mpileaks ^mpich", installed=any) == []
    assert mutable_database.query("callpath ^mpich", installed=any) == []
    assert mutable_database.query("mpileaks ^mpich", installed=InstallRecordStatus.ANY) == []
    assert mutable_database.query("callpath ^mpich", installed=InstallRecordStatus.ANY) == []

    # mpich ref count updated properly.
    mpich_rec = mutable_database.get_record("mpich")
@@ -668,14 +672,14 @@ def fail_while_writing():
        raise Exception()

    with database.read_transaction():
        assert len(database.query("mpileaks ^zmpi", installed=any)) == 1
        assert len(database.query("mpileaks ^zmpi", installed=InstallRecordStatus.ANY)) == 1

    with pytest.raises(Exception):
        fail_while_writing()

    # reload DB and make sure zmpi is still there.
    with database.read_transaction():
        assert len(database.query("mpileaks ^zmpi", installed=any)) == 1
        assert len(database.query("mpileaks ^zmpi", installed=InstallRecordStatus.ANY)) == 1


def test_110_no_write_with_exception_on_install(database):
@@ -685,14 +689,14 @@ def fail_while_writing():
        raise Exception()

    with database.read_transaction():
        assert database.query("cmake", installed=any) == []
        assert database.query("cmake", installed=InstallRecordStatus.ANY) == []

    with pytest.raises(Exception):
        fail_while_writing()

    # reload DB and make sure cmake was not written.
    with database.read_transaction():
        assert database.query("cmake", installed=any) == []
        assert database.query("cmake", installed=InstallRecordStatus.ANY) == []


def test_115_reindex_with_packages_not_in_repo(mutable_database, tmpdir):
@@ -73,5 +73,18 @@ def test_ascii_graph_mpileaks(config, mock_packages, monkeypatch):
o | libdwarf
|/
o libelf
"""
        or graph_str
        == r"""o mpileaks
|\
| o callpath
|/|
| o dyninst
| |\
o | | mpich
 / /
| o libdwarf
|/
o libelf
"""
    )
@@ -69,17 +69,6 @@ def test_icc16_link_paths():
    )


def test_pgi_link_paths():
    check_link_paths(
        "pgcc-16.3.txt",
        [
            os.path.join(
                root, "usr", "tce", "packages", "pgi", "pgi-16.3", "linux86-64", "16.3", "lib"
            )
        ],
    )


def test_gcc7_link_paths():
    check_link_paths("gcc-7.3.1.txt", [])

@@ -1249,3 +1249,14 @@ def test_find_input_types(tmp_path: pathlib.Path):
    with pytest.raises(TypeError):
        fs.find(1, "file.txt")  # type: ignore


def test_edit_in_place_through_temporary_file(tmp_path):
    (tmp_path / "example.txt").write_text("Hello")
    current_ino = os.stat(tmp_path / "example.txt").st_ino
    with fs.edit_in_place_through_temporary_file(tmp_path / "example.txt") as temporary:
        os.unlink(temporary)
        with open(temporary, "w") as f:
            f.write("World")
    assert (tmp_path / "example.txt").read_text() == "World"
    assert os.stat(tmp_path / "example.txt").st_ino == current_ino
@@ -12,6 +12,7 @@
import spack.cmd.modules
import spack.config
import spack.error
import spack.modules
import spack.modules.common
import spack.modules.tcl
import spack.package_base
@@ -21,6 +21,7 @@
import spack.deptypes as dt
import spack.error
import spack.install_test
import spack.package
import spack.package_base
import spack.repo
import spack.spec
Some files were not shown because too many files have changed in this diff.